Installing kuttl
Try 0... curl -sLo /tmp/jaeger-tests/hack/install/../../bin/kubectl-kuttl https://github.com/kudobuilder/kuttl/releases/download/v0.15.0/kubectl-kuttl_0.15.0_linux_x86_64
KUBECONFIG file is: /tmp/kubeconfig-1829818249
for suite in miscellaneous elasticsearch examples generate upgrade sidecar streaming ui; do \
make run-e2e-tests-$suite ; \
done
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh miscellaneous false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=miscellaneous
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/miscellaneous.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-miscellaneous
make[2]: Entering directory '/tmp/jaeger-tests'
SKIP_ES_EXTERNAL=true ./tests/e2e/miscellaneous/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.16.11 True False 12m Cluster version is 4.16.11'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.16.11 True False 12m Cluster version is 4.16.11' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/miscellaneous/render.sh
++ export SUITE_DIR=./tests/e2e/miscellaneous
++ SUITE_DIR=./tests/e2e/miscellaneous
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/miscellaneous
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ start_test collector-autoscale
+ '[' 1 -ne 1 ']'
+ test_name=collector-autoscale
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-autoscale'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-autoscale\e[0m'
Rendering files for test collector-autoscale
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build
+ '[' _build '!=' _build ']'
+ mkdir -p collector-autoscale
+ cd collector-autoscale
+ jaeger_name=simple-prod
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ ELASTICSEARCH_NODECOUNT=1
+ render_install_jaeger simple-prod production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=simple-prod
+ JAEGER_NAME=simple-prod
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.collector.resources.requests.memory="200m"' 01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.autoscale=true 01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.minReplicas=1 01-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i .spec.collector.maxReplicas=2 01-install.yaml
+ version_lt 1.30 1.23
++ echo 1.30 1.23
++ tr ' ' '\n'
++ sort -rV
++ head -n 1
+ test 1.30 '!=' 1.30
+ rm ./03-assert.yaml
+ generate_otlp_e2e_tests http
+ test_protocol=http
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ start_test collector-otlp-allinone-http
+ '[' 1 -ne 1 ']'
+ test_name=collector-otlp-allinone-http
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-otlp-allinone-http'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-otlp-allinone-http\e[0m'
Rendering files for test collector-otlp-allinone-http
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-autoscale
+ '[' collector-autoscale '!=' _build ']'
+ cd ..
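[note] The version_le / version_lt calls traced above compare versions with GNU sort -V rather than numerically, which is why 3.6.0 sorts after 0.25.0. A reconstruction of the two helpers as the trace implies them (not the script's literal source):

    version_le() {  # succeeds when $1 <= $2 in version-sort order
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
    }
    version_lt() {  # succeeds when $1 < $2 (strict): the reverse-sorted maximum is not $1
      test "$(echo "$1" "$2" | tr ' ' '\n' | sort -rV | head -n 1)" != "$1"
    }
    version_le 3.6.0 0.25.0 || KAFKA_USE_CUSTOM_PODSET=true  # matches the outcome in the trace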
+ mkdir -p collector-otlp-allinone-http
+ cd collector-otlp-allinone-http
+ render_install_jaeger my-jaeger allInOne 00
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=allInOne
+ test_step=00
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_otlp_smoke_test my-jaeger http true 01
+ '[' 4 -ne 4 ']'
+ jaeger=my-jaeger
+ reporting_protocol=http
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template
+ '[' http = grpc ']'
+ reporting_port=:4318
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318
+ OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ REPORTING_PROTOCOL=http
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset OTEL_EXPORTER_OTLP_ENDPOINT
+ start_test collector-otlp-production-http
+ '[' 1 -ne 1 ']'
+ test_name=collector-otlp-production-http
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-otlp-production-http'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-otlp-production-http\e[0m'
Rendering files for test collector-otlp-production-http
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-http
+ '[' collector-otlp-allinone-http '!=' _build ']'
+ cd ..
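[note] The smoke-test renderer traced above derives both endpoints from just two inputs: the reporting protocol picks the OTLP port, and the secured flag routes the query API through the TLS route on :443. A condensed sketch of that logic (variable names from the trace; the unsecured branch is an assumption, since only the OpenShift path runs here):

    if [ "$reporting_protocol" = grpc ]; then reporting_port=:4317; else reporting_port=:4318; fi
    if [ "$is_secured" = true ]; then
      protocol=https:// query_port=:443     # query goes through the TLS route
    else
      protocol=http:// query_port=:16686    # assumed: direct query-service port
    fi
    export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
    export OTEL_EXPORTER_OTLP_ENDPOINT=http://${jaeger}-collector-headless${reporting_port}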
+ mkdir -p collector-otlp-production-http
+ cd collector-otlp-production-http
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_otlp_smoke_test my-jaeger http true 02
+ '[' 4 -ne 4 ']'
+ jaeger=my-jaeger
+ reporting_protocol=http
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template
+ '[' http = grpc ']'
+ reporting_port=:4318
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318
+ OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ REPORTING_PROTOCOL=http
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset OTEL_EXPORTER_OTLP_ENDPOINT
+ generate_otlp_e2e_tests grpc
+ test_protocol=grpc
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ start_test collector-otlp-allinone-grpc
+ '[' 1 -ne 1 ']'
+ test_name=collector-otlp-allinone-grpc
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-otlp-allinone-grpc'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-otlp-allinone-grpc\e[0m'
Rendering files for test collector-otlp-allinone-grpc
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-http
+ '[' collector-otlp-production-http '!=' _build ']'
+ cd ..
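[note] kuttl groups files by their numeric prefix: all files sharing a prefix form one step, the non-assert files are applied, and the matching NN-assert.yaml must be satisfied before the next step starts. Under that convention the directory rendered above lays out roughly as (paraphrased, not a verbatim listing):

    collector-otlp-production-http/
      01-install.yaml     # Jaeger CR, production strategy with autoprovisioned Elasticsearch
      01-assert.yaml      # expected Deployment state for collector and query
      02-smoke-test.yaml  # report-span / check-span Jobs
      02-assert.yaml      # both Jobs must reach completion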
+ mkdir -p collector-otlp-allinone-grpc
+ cd collector-otlp-allinone-grpc
+ render_install_jaeger my-jaeger allInOne 00
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=allInOne
+ test_step=00
+ '[' allInOne = allInOne ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml
+ render_otlp_smoke_test my-jaeger grpc true 01
+ '[' 4 -ne 4 ']'
+ jaeger=my-jaeger
+ reporting_protocol=grpc
+ is_secured=true
+ test_step=01
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template
+ '[' grpc = grpc ']'
+ reporting_port=:4317
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317
+ OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ REPORTING_PROTOCOL=grpc
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./01-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset OTEL_EXPORTER_OTLP_ENDPOINT
+ start_test collector-otlp-production-grpc
+ '[' 1 -ne 1 ']'
+ test_name=collector-otlp-production-grpc
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test collector-otlp-production-grpc'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test collector-otlp-production-grpc\e[0m'
Rendering files for test collector-otlp-production-grpc
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-allinone-grpc
+ '[' collector-otlp-allinone-grpc '!=' _build ']'
+ cd ..
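[note] Each gomplate call above is plain environment substitution: the exported variables (JAEGER_NAME, JAEGER_QUERY_ENDPOINT, OTEL_EXPORTER_OTLP_ENDPOINT, ...) are interpolated into the template and written to the numbered step file, which is why the script exports before rendering and unsets afterwards. A hypothetical fragment in that style (the real templates live under tests/templates/ and are not shown in this log):

    # template fragment (hypothetical):  metadata: { name: '{{ .Env.JAEGER_NAME }}' }
    JAEGER_NAME=my-jaeger \
      /tmp/jaeger-tests/bin/gomplate -f jaeger.yaml.template -o ./00-install.yaml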
+ mkdir -p collector-otlp-production-grpc
+ cd collector-otlp-production-grpc
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ render_otlp_smoke_test my-jaeger grpc true 02
+ '[' 4 -ne 4 ']'
+ jaeger=my-jaeger
+ reporting_protocol=grpc
+ is_secured=true
+ test_step=02
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template
+ '[' grpc = grpc ']'
+ reporting_port=:4317
+ export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443
+ export OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317
+ OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ REPORTING_PROTOCOL=grpc
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/otlp-smoke-test.yaml.template -o ./02-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset OTEL_EXPORTER_OTLP_ENDPOINT
+ '[' true = true ']'
+ skip_test istio 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=istio
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/collector-otlp-production-grpc
+ '[' collector-otlp-production-grpc '!=' _build ']'
+ cd ..
+ rm -rf istio
+ warning 'istio: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: istio: Test not supported in OpenShift\e[0m'
WAR: istio: Test not supported in OpenShift
+ '[' true = true ']'
+ skip_test outside-cluster 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=outside-cluster
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build
+ '[' _build '!=' _build ']'
+ rm -rf outside-cluster
+ warning 'outside-cluster: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: outside-cluster: Test not supported in OpenShift\e[0m'
WAR: outside-cluster: Test not supported in OpenShift
+ start_test set-custom-img
+ '[' 1 -ne 1 ']'
+ test_name=set-custom-img
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test set-custom-img'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test set-custom-img\e[0m'
Rendering files for test set-custom-img
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build
+ '[' _build '!=' _build ']'
+ mkdir -p set-custom-img
+ cd set-custom-img
+ jaeger_name=my-jaeger
+ jaeger_deploy_mode=production
+ [[ true = true ]]
+ [[ true = true ]]
+ jaeger_deploy_mode=production_autoprovisioned
+ render_install_jaeger my-jaeger production_autoprovisioned 01
+ '[' 3 -ne 3 ']'
+ export JAEGER_NAME=my-jaeger
+ JAEGER_NAME=my-jaeger
+ deploy_mode=production_autoprovisioned
+ test_step=01
+ '[' production_autoprovisioned = allInOne ']'
+ '[' production_autoprovisioned = production ']'
+ '[' production_autoprovisioned = production_cassandra ']'
+ '[' production_autoprovisioned = production_autoprovisioned ']'
+ '[' true '!=' true ']'
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml
+ cp ./01-install.yaml ./02-install.yaml
+ /tmp/jaeger-tests/bin/yq e -i '.spec.collector.image="test"' ./02-install.yaml
+ '[' true = true ']'
+ skip_test non-cluster-wide 'Test not supported in OpenShift'
+ '[' 2 -ne 2 ']'
+ test_name=non-cluster-wide
+ message='Test not supported in OpenShift'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/miscellaneous/_build/set-custom-img
+ '[' set-custom-img '!=' _build ']'
+ cd ..
+ rm -rf non-cluster-wide
+ warning 'non-cluster-wide: Test not supported in OpenShift'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: non-cluster-wide: Test not supported in OpenShift\e[0m'
WAR: non-cluster-wide: Test not supported in OpenShift
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running miscellaneous E2E tests'
Running miscellaneous E2E tests
+ cd tests/e2e/miscellaneous/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-1829818249
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
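[note] At this point rendering is done and the suite hands the _build directory to kuttl: every surviving test directory under the rendered kuttl-test.yaml becomes one test, and --report xml emits a JUnit-style report (presumably into the artifacts location the suite prepared as /logs/artifacts, given the earlier rm -f /logs/artifacts/miscellaneous.xml). A standalone re-run would look like:

    cd /tmp/jaeger-tests/tests/e2e/miscellaneous/_build
    KUBECONFIG=/tmp/kubeconfig-1829818249 kubectl-kuttl test --report xml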
harness.go:275: Successful connection to cluster at: https://api.ci-rosa-h-a4mj.qd6c.s3.devshift.org:443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 8 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/cassandra-spark === PAUSE kuttl/harness/cassandra-spark === RUN kuttl/harness/collector-autoscale === PAUSE kuttl/harness/collector-autoscale === RUN kuttl/harness/collector-otlp-allinone-grpc === PAUSE kuttl/harness/collector-otlp-allinone-grpc === RUN kuttl/harness/collector-otlp-allinone-http === PAUSE kuttl/harness/collector-otlp-allinone-http === RUN kuttl/harness/collector-otlp-production-grpc === PAUSE kuttl/harness/collector-otlp-production-grpc === RUN kuttl/harness/collector-otlp-production-http === PAUSE kuttl/harness/collector-otlp-production-http === RUN kuttl/harness/set-custom-img === PAUSE kuttl/harness/set-custom-img === CONT kuttl/harness/artifacts logger.go:42: 00:58:12 | artifacts | Creating namespace: kuttl-test-happy-cowbird logger.go:42: 00:58:12 | artifacts | artifacts events from ns kuttl-test-happy-cowbird: logger.go:42: 00:58:12 | artifacts | Deleting namespace: kuttl-test-happy-cowbird === CONT kuttl/harness/collector-otlp-allinone-http logger.go:42: 00:58:18 | collector-otlp-allinone-http | Creating namespace: kuttl-test-prime-quetzal logger.go:42: 00:58:18 | collector-otlp-allinone-http/0-install | starting test step 0-install logger.go:42: 00:58:18 | collector-otlp-allinone-http/0-install | Jaeger:kuttl-test-prime-quetzal/my-jaeger created logger.go:42: 00:58:28 | collector-otlp-allinone-http/0-install | test step completed 0-install logger.go:42: 00:58:28 | collector-otlp-allinone-http/1-smoke-test | starting test step 1-smoke-test logger.go:42: 00:58:28 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 00:58:31 | collector-otlp-allinone-http/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
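[note] The smoke-test step starting here renders a pair of Jobs: report-span pushes spans to the OTLP endpoint and check-span polls the query API until the trace is visible; the step's assert then waits for both to complete. A rough manual equivalent (job names taken from the log below):

    kubectl create -f otlp-smoke-test-job.yaml -n "$NAMESPACE"
    kubectl wait --for=condition=complete -n "$NAMESPACE" job/report-span job/check-span --timeout=300s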
logger.go:42: 00:58:40 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=http ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4318 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 00:58:41 | collector-otlp-allinone-http/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 00:58:41 | collector-otlp-allinone-http/1-smoke-test | job.batch/report-span created logger.go:42: 00:58:41 | collector-otlp-allinone-http/1-smoke-test | job.batch/check-span created logger.go:42: 00:58:59 | collector-otlp-allinone-http/1-smoke-test | test step completed 1-smoke-test logger.go:42: 00:58:59 | collector-otlp-allinone-http | collector-otlp-allinone-http events from ns kuttl-test-prime-quetzal: logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:19 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2 Binding Scheduled Successfully assigned kuttl-test-prime-quetzal/my-jaeger-fd495d56f-9cdt2 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:19 +0000 UTC Normal ReplicaSet.apps my-jaeger-fd495d56f SuccessfulCreate Created pod: my-jaeger-fd495d56f-9cdt2 replicaset-controller logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:19 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-fd495d56f to 1 deployment-controller logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:20 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2 AddedInterface Add eth0 [10.130.0.19/23] from ovn-kubernetes multus logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:20 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{jaeger} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:23 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{jaeger} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" in 3.108s (3.108s including waiting) kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:23 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:23 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:23 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{oauth-proxy} Pulling Pulling image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:26 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{oauth-proxy} Pulled Successfully pulled image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" in 3.403s (3.403s 
including waiting) kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:26 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:26 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:33 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-fd495d56f to 0 from 1 deployment-controller logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:34 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:34 +0000 UTC Normal Pod my-jaeger-fd495d56f-9cdt2.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:34 +0000 UTC Normal ReplicaSet.apps my-jaeger-fd495d56f SuccessfulDelete Deleted pod: my-jaeger-fd495d56f-9cdt2 replicaset-controller logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal Pod my-jaeger-6694dd5fcd-tww66 Binding Scheduled Successfully assigned kuttl-test-prime-quetzal/my-jaeger-6694dd5fcd-tww66 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal Pod my-jaeger-6694dd5fcd-tww66 AddedInterface Add eth0 [10.130.0.20/23] from ovn-kubernetes multus logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal Pod my-jaeger-6694dd5fcd-tww66.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal Pod my-jaeger-6694dd5fcd-tww66.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal Pod my-jaeger-6694dd5fcd-tww66.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal Pod my-jaeger-6694dd5fcd-tww66.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal Pod my-jaeger-6694dd5fcd-tww66.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal Pod my-jaeger-6694dd5fcd-tww66.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal ReplicaSet.apps my-jaeger-6694dd5fcd SuccessfulCreate Created pod: my-jaeger-6694dd5fcd-tww66 replicaset-controller logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:35 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6694dd5fcd to 1 
deployment-controller logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:41 +0000 UTC Normal Pod check-span-dfplp Binding Scheduled Successfully assigned kuttl-test-prime-quetzal/check-span-dfplp to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:41 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-dfplp job-controller logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:41 +0000 UTC Normal Pod report-span-ttrrg Binding Scheduled Successfully assigned kuttl-test-prime-quetzal/report-span-ttrrg to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:41 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-ttrrg job-controller logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:42 +0000 UTC Normal Pod check-span-dfplp AddedInterface Add eth0 [10.130.0.22/23] from ovn-kubernetes multus logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:42 +0000 UTC Normal Pod check-span-dfplp.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:42 +0000 UTC Normal Pod report-span-ttrrg AddedInterface Add eth0 [10.130.0.21/23] from ovn-kubernetes multus logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:42 +0000 UTC Normal Pod report-span-ttrrg.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:47 +0000 UTC Normal Pod check-span-dfplp.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 4.608s (4.608s including waiting) kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:47 +0000 UTC Normal Pod check-span-dfplp.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:47 +0000 UTC Normal Pod check-span-dfplp.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:47 +0000 UTC Normal Pod report-span-ttrrg.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 4.972s (4.972s including waiting) kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:47 +0000 UTC Normal Pod report-span-ttrrg.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:47 +0000 UTC Normal Pod report-span-ttrrg.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 00:58:59 | collector-otlp-allinone-http | 2024-09-19 00:58:58 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 00:58:59 | collector-otlp-allinone-http | Deleting namespace: kuttl-test-prime-quetzal === CONT kuttl/harness/set-custom-img logger.go:42: 00:59:06 | set-custom-img | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 00:59:06 | set-custom-img | Ignoring check-collector-img.sh as it does 
not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 00:59:06 | set-custom-img | Creating namespace: kuttl-test-ideal-reindeer logger.go:42: 00:59:06 | set-custom-img/1-install | starting test step 1-install logger.go:42: 00:59:06 | set-custom-img/1-install | Jaeger:kuttl-test-ideal-reindeer/my-jaeger created logger.go:42: 01:09:07 | set-custom-img/1-install | test step failed 1-install case.go:364: failed in step 1-install case.go:366: --- Deployment:kuttl-test-ideal-reindeer/my-jaeger-collector +++ Deployment:kuttl-test-ideal-reindeer/my-jaeger-collector @@ -1,10 +1,412 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-collector + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"d92c6d47-da97-473a-9d16-84fe9970502e"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-collector"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"COLLECTOR_OTLP_ENABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"COLLECTOR_ZIPKIN_HOST_PORT"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":4317,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":4318,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":9411,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14250,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14267,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14268,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14269,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + 
f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/jaeger/sampling"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls-config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"my-jaeger-collector-tls-config-volume"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"my-jaeger-sampling-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"my-jaeger-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T00:59:13Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T00:59:13Z" name: my-jaeger-collector namespace: kuttl-test-ideal-reindeer + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: my-jaeger + uid: d92c6d47-da97-473a-9d16-84fe9970502e spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-collector + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-collector + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --collector.grpc.tls.cert=/etc/tls-config/tls.crt + - --collector.grpc.tls.enabled=true + - --collector.grpc.tls.key=/etc/tls-config/tls.key + - --sampling.strategies-file=/etc/jaeger/sampling/sampling.json + - --es.server-urls=https://elasticsearch.kuttl-test-ideal-reindeer.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: COLLECTOR_ZIPKIN_HOST_PORT + 
value: :9411 + - name: COLLECTOR_OTLP_ENABLED + value: "true" + image: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-collector + ports: + - containerPort: 9411 + name: zipkin + protocol: TCP + - containerPort: 14267 + name: c-tchan-trft + protocol: TCP + - containerPort: 14268 + name: c-binary-trft + protocol: TCP + - containerPort: 14269 + name: admin-http + protocol: TCP + - containerPort: 14250 + name: grpc + protocol: TCP + - containerPort: 4317 + name: grpc-otlp + protocol: TCP + - containerPort: 4318 + name: http-otlp + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/jaeger/sampling + name: my-jaeger-sampling-configuration-volume + readOnly: true + - mountPath: /etc/tls-config + name: my-jaeger-collector-tls-config-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: my-jaeger + serviceAccountName: my-jaeger + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: sampling + path: sampling.json + name: my-jaeger-sampling-configuration + name: my-jaeger-sampling-configuration-volume + - name: my-jaeger-collector-tls-config-volume + secret: + defaultMode: 420 + secretName: my-jaeger-collector-headless-tls + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: my-jaeger-trusted-ca + name: my-jaeger-trusted-ca + - name: certs + secret: + defaultMode: 420 + secretName: my-jaeger-jaeger-elasticsearch status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T00:59:13Z" + lastUpdateTime: "2024-09-19T00:59:13Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T00:59:13Z" + lastUpdateTime: "2024-09-19T00:59:13Z" + message: ReplicaSet "my-jaeger-collector-5d85568dbd" is progressing. 
+ reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 1 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-ideal-reindeer/my-jaeger-collector: .status.readyReplicas: key is missing from map case.go:366: --- Deployment:kuttl-test-ideal-reindeer/my-jaeger-query +++ Deployment:kuttl-test-ideal-reindeer/my-jaeger-query @@ -1,10 +1,508 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.jaegertracing.io/inject: my-jaeger + sidecar.jaegertracing.io/revision: "0" + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-query + app.kubernetes.io/part-of: jaeger + sidecar.jaegertracing.io/injected: my-jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.jaegertracing.io/inject: {} + f:sidecar.jaegertracing.io/revision: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"d92c6d47-da97-473a-9d16-84fe9970502e"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:sidecar.jaegertracing.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-query"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"JAEGER_DISABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"METRICS_STORAGE_TYPE"}: + .: {} + f:name: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":16685,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16686,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16687,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + 
k:{"name":"oauth-proxy"}: + .: {} + f:args: {} + f:image: {} + f:imagePullPolicy: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":8443,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls/private"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"my-jaeger-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"my-jaeger-ui-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"my-jaeger-ui-oauth-proxy-tls"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T01:04:13Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:04:13Z" name: my-jaeger-query namespace: kuttl-test-ideal-reindeer + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: my-jaeger + uid: d92c6d47-da97-473a-9d16-84fe9970502e spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-query + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + sidecar.jaegertracing.io/inject: my-jaeger + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-query + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --query.ui-config=/etc/config/ui.json + - --es.server-urls=https://elasticsearch.kuttl-test-ideal-reindeer.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: METRICS_STORAGE_TYPE + - name: JAEGER_DISABLED + value: "false" + - name: JAEGER_SERVICE_NAME + value: my-jaeger.kuttl-test-ideal-reindeer + - name: 
JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-query + ports: + - containerPort: 16685 + name: grpc-query + protocol: TCP + - containerPort: 16686 + name: query + protocol: TCP + - containerPort: 16687 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/config + name: my-jaeger-ui-configuration-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + - args: + - --cookie-secret=me79wG3mEHOH7NYfK5a37I + - --https-address=:8443 + - '--openshift-sar={"namespace": "kuttl-test-ideal-reindeer", "resource": + "pods", "verb": "get"}' + - --openshift-service-account=my-jaeger-ui-proxy + - --provider=openshift + - --tls-cert=/etc/tls/private/tls.crt + - --tls-key=/etc/tls/private/tls.key + - --upstream=http://localhost:16686 + env: + - name: JAEGER_SERVICE_NAME + value: my-jaeger.kuttl-test-ideal-reindeer + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508 + imagePullPolicy: IfNotPresent + name: oauth-proxy + ports: + - containerPort: 8443 + name: public + protocol: TCP + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/tls/private + name: my-jaeger-ui-oauth-proxy-tls + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - args: + - --agent.tags=cluster=undefined,deployment.name=my-jaeger-query,host.ip=${HOST_IP:},pod.name=${POD_NAME:},pod.namespace=kuttl-test-ideal-reindeer + - --reporter.grpc.host-port=dns:///my-jaeger-collector-headless.kuttl-test-ideal-reindeer.svc:14250 + - --reporter.grpc.tls.ca=/etc/pki/ca-trust/source/service-ca/service-ca.crt + - --reporter.grpc.tls.enabled=true + env: + - name: POD_NAME + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.name + - name: HOST_IP + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: status.hostIP + image: registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-agent + ports: + - containerPort: 5775 + name: zk-compact-trft + protocol: UDP + - containerPort: 5778 + name: config-rest + protocol: TCP + - containerPort: 6831 + name: jg-compact-trft + protocol: UDP + - containerPort: 6832 + name: jg-binary-trft + protocol: UDP + - containerPort: 14271 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + 
terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - mountPath: /etc/pki/ca-trust/source/service-ca + name: my-jaeger-service-ca + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: my-jaeger-ui-proxy + serviceAccountName: my-jaeger-ui-proxy + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: ui + path: ui.json + name: my-jaeger-ui-configuration + name: my-jaeger-ui-configuration-volume + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: my-jaeger-trusted-ca + name: my-jaeger-trusted-ca + - name: my-jaeger-ui-oauth-proxy-tls + secret: + defaultMode: 420 + secretName: my-jaeger-ui-oauth-proxy-tls + - name: certs + secret: + defaultMode: 420 + secretName: my-jaeger-jaeger-elasticsearch + - configMap: + defaultMode: 420 + items: + - key: service-ca.crt + path: service-ca.crt + name: my-jaeger-service-ca + name: my-jaeger-service-ca status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T00:59:13Z" + lastUpdateTime: "2024-09-19T00:59:13Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T00:59:13Z" + lastUpdateTime: "2024-09-19T00:59:13Z" + message: ReplicaSet "my-jaeger-query-6cd99886d4" is progressing. + reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 2 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-ideal-reindeer/my-jaeger-query: .status.readyReplicas: key is missing from map logger.go:42: 01:09:07 | set-custom-img | set-custom-img events from ns kuttl-test-ideal-reindeer: logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal Pod my-jaeger-collector-5d85568dbd-ftgdl Binding Scheduled Successfully assigned kuttl-test-ideal-reindeer/my-jaeger-collector-5d85568dbd-ftgdl to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal Pod my-jaeger-collector-5d85568dbd-ftgdl AddedInterface Add eth0 [10.130.0.23/23] from ovn-kubernetes multus logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal Pod my-jaeger-collector-5d85568dbd-ftgdl.spec.containers{jaeger-collector} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-5d85568dbd SuccessfulCreate Created pod: my-jaeger-collector-5d85568dbd-ftgdl replicaset-controller logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-5d85568dbd to 1 deployment-controller logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp Binding Scheduled Successfully assigned kuttl-test-ideal-reindeer/my-jaeger-query-6cd99886d4-7gpfp to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal Pod 
my-jaeger-query-6cd99886d4-7gpfp AddedInterface Add eth0 [10.130.0.24/23] from ovn-kubernetes multus logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-query} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-6cd99886d4 SuccessfulCreate Created pod: my-jaeger-query-6cd99886d4-7gpfp replicaset-controller logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:13 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-6cd99886d4 to 1 deployment-controller logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:19 +0000 UTC Normal Pod my-jaeger-collector-5d85568dbd-ftgdl.spec.containers{jaeger-collector} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" in 6.078s (6.078s including waiting) kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:19 +0000 UTC Normal Pod my-jaeger-collector-5d85568dbd-ftgdl.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:20 +0000 UTC Normal Pod my-jaeger-collector-5d85568dbd-ftgdl.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:21 +0000 UTC Warning Pod my-jaeger-collector-5d85568dbd-ftgdl.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:21 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-query} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" in 7.206s (7.206s including waiting) kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:21 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:21 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:21 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:21 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:21 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:21 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" kubelet logger.go:42: 
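The ".status.readyReplicas: key is missing from map" failure above is kuttl reporting an assert that never converged: the expected object pins status.readyReplicas: 1 (the line prefixed with "-" in the diff), but because the query pod keeps failing its readiness probe, the Deployment controller never writes readyReplicas at all, leaving only conditions and unavailableReplicas. The assert file behind this check is not included in the log, but it presumably has roughly this shape (file name hypothetical):

# Hypothetical kuttl assert file (e.g. 0N-assert.yaml). kuttl polls the
# live Deployment until it contains every field listed here, and fails
# the step at the timeout when, as in this run, readyReplicas never
# appears because no replica ever becomes Ready.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: my-jaeger-query
status:
  readyReplicas: 1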
01:09:07 | set-custom-img | 2024-09-19 00:59:25 +0000 UTC Normal Pod my-jaeger-collector-5d85568dbd-ftgdl.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" already present on machine kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:27 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" in 6.427s (6.427s including waiting) kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:27 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:27 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:28 +0000 UTC Normal Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" already present on machine kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:29 +0000 UTC Warning Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-query} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:31 +0000 UTC Warning Pod my-jaeger-collector-5d85568dbd-ftgdl.spec.containers{jaeger-collector} BackOff Back-off restarting failed container jaeger-collector in pod my-jaeger-collector-5d85568dbd-ftgdl_kuttl-test-ideal-reindeer(90ba1428-9fcf-4f84-809a-07db6437b516) kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:33 +0000 UTC Warning Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-query} Unhealthy Readiness probe failed: Get "http://10.130.0.24:16687/": dial tcp 10.130.0.24:16687: connect: connection refused kubelet logger.go:42: 01:09:07 | set-custom-img | 2024-09-19 00:59:34 +0000 UTC Warning Pod my-jaeger-query-6cd99886d4-7gpfp.spec.containers{jaeger-query} BackOff Back-off restarting failed container jaeger-query in pod my-jaeger-query-6cd99886d4-7gpfp_kuttl-test-ideal-reindeer(94ba5da1-0f78-4916-a4ca-ba4b93635e63) kubelet logger.go:42: 01:09:07 | set-custom-img | Deleting namespace: kuttl-test-ideal-reindeer === CONT kuttl/harness/collector-otlp-production-http logger.go:42: 01:09:13 | collector-otlp-production-http | Creating namespace: kuttl-test-kind-ringtail logger.go:42: 01:09:13 | collector-otlp-production-http/1-install | starting test step 1-install logger.go:42: 01:09:14 | collector-otlp-production-http/1-install | Jaeger:kuttl-test-kind-ringtail/my-jaeger created logger.go:42: 01:19:14 | collector-otlp-production-http/1-install | test step failed 1-install case.go:364: failed in step 1-install case.go:366: --- Deployment:kuttl-test-kind-ringtail/my-jaeger-collector +++ Deployment:kuttl-test-kind-ringtail/my-jaeger-collector @@ -1,10 +1,412 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: my-jaeger + 
app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-collector + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"50573c85-a9d1-418b-8f57-5b3bb2801523"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-collector"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"COLLECTOR_OTLP_ENABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"COLLECTOR_ZIPKIN_HOST_PORT"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":4317,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":4318,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":9411,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14250,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14267,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14268,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14269,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/jaeger/sampling"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls-config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"my-jaeger-collector-tls-config-volume"}: + .: {} + 
f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"my-jaeger-sampling-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"my-jaeger-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T01:09:18Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:09:18Z" name: my-jaeger-collector namespace: kuttl-test-kind-ringtail + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: my-jaeger + uid: 50573c85-a9d1-418b-8f57-5b3bb2801523 spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-collector + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-collector + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --collector.grpc.tls.cert=/etc/tls-config/tls.crt + - --collector.grpc.tls.enabled=true + - --collector.grpc.tls.key=/etc/tls-config/tls.key + - --sampling.strategies-file=/etc/jaeger/sampling/sampling.json + - --es.server-urls=https://elasticsearch.kuttl-test-kind-ringtail.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: COLLECTOR_ZIPKIN_HOST_PORT + value: :9411 + - name: COLLECTOR_OTLP_ENABLED + value: "true" + image: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-collector + ports: + - containerPort: 9411 + name: zipkin + protocol: TCP + - containerPort: 14267 + name: c-tchan-trft + protocol: TCP + - containerPort: 14268 + name: c-binary-trft + protocol: TCP + - containerPort: 14269 + name: admin-http + protocol: TCP + - containerPort: 14250 + name: grpc + protocol: TCP + - containerPort: 4317 + name: grpc-otlp + protocol: TCP + - containerPort: 4318 + name: http-otlp + protocol: TCP + readinessProbe: + 
failureThreshold: 3 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/jaeger/sampling + name: my-jaeger-sampling-configuration-volume + readOnly: true + - mountPath: /etc/tls-config + name: my-jaeger-collector-tls-config-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: my-jaeger + serviceAccountName: my-jaeger + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: sampling + path: sampling.json + name: my-jaeger-sampling-configuration + name: my-jaeger-sampling-configuration-volume + - name: my-jaeger-collector-tls-config-volume + secret: + defaultMode: 420 + secretName: my-jaeger-collector-headless-tls + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: my-jaeger-trusted-ca + name: my-jaeger-trusted-ca + - name: certs + secret: + defaultMode: 420 + secretName: my-jaeger-jaeger-elasticsearch status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:09:18Z" + lastUpdateTime: "2024-09-19T01:09:18Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:09:18Z" + lastUpdateTime: "2024-09-19T01:09:18Z" + message: ReplicaSet "my-jaeger-collector-db956c455" is progressing. 
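For orientation: the collector spec in this diff (Recreate strategy, --collector.grpc.tls.* flags, --es.* flags pointing at https://elasticsearch.<namespace>.svc.cluster.local:9200, and the /certs mounts from the my-jaeger-jaeger-elasticsearch secret) is what the operator renders for a production-strategy Jaeger instance backed by TLS-enabled Elasticsearch. The actual manifest applied by the 1-install step is not reproduced in the log; an illustrative sketch of such a CR (assumed shape):

# Illustrative Jaeger CR; the real 1-install manifest is not shown in
# this log. The operator derives the collector/query --es.* arguments
# and the certificate mounts above from spec.storage.
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: my-jaeger
spec:
  strategy: production
  storage:
    type: elasticsearch
    options:
      es.server-urls: https://elasticsearch.kuttl-test-kind-ringtail.svc.cluster.local:9200
      es.tls.enabled: "true"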
+ reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 1 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-kind-ringtail/my-jaeger-collector: .status.readyReplicas: key is missing from map case.go:366: --- Deployment:kuttl-test-kind-ringtail/my-jaeger-query +++ Deployment:kuttl-test-kind-ringtail/my-jaeger-query @@ -1,10 +1,508 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.jaegertracing.io/inject: my-jaeger + sidecar.jaegertracing.io/revision: "1" + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-query + app.kubernetes.io/part-of: jaeger + sidecar.jaegertracing.io/injected: my-jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.jaegertracing.io/inject: {} + f:sidecar.jaegertracing.io/revision: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"50573c85-a9d1-418b-8f57-5b3bb2801523"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:sidecar.jaegertracing.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-query"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"JAEGER_DISABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"METRICS_STORAGE_TYPE"}: + .: {} + f:name: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":16685,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16686,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16687,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + 
k:{"name":"oauth-proxy"}: + .: {} + f:args: {} + f:image: {} + f:imagePullPolicy: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":8443,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls/private"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"my-jaeger-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"my-jaeger-ui-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"my-jaeger-ui-oauth-proxy-tls"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T01:14:19Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:14:19Z" name: my-jaeger-query namespace: kuttl-test-kind-ringtail + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: my-jaeger + uid: 50573c85-a9d1-418b-8f57-5b3bb2801523 spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-query + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + sidecar.jaegertracing.io/inject: my-jaeger + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-query + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --query.ui-config=/etc/config/ui.json + - --es.server-urls=https://elasticsearch.kuttl-test-kind-ringtail.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: METRICS_STORAGE_TYPE + - name: JAEGER_DISABLED + value: "false" + - name: JAEGER_SERVICE_NAME + value: my-jaeger.kuttl-test-kind-ringtail + - name: 
JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-query + ports: + - containerPort: 16685 + name: grpc-query + protocol: TCP + - containerPort: 16686 + name: query + protocol: TCP + - containerPort: 16687 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/config + name: my-jaeger-ui-configuration-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + - args: + - --cookie-secret=8QD0JNlk99FmlhCoRsOxaJ + - --https-address=:8443 + - '--openshift-sar={"namespace": "kuttl-test-kind-ringtail", "resource": "pods", + "verb": "get"}' + - --openshift-service-account=my-jaeger-ui-proxy + - --provider=openshift + - --tls-cert=/etc/tls/private/tls.crt + - --tls-key=/etc/tls/private/tls.key + - --upstream=http://localhost:16686 + env: + - name: JAEGER_SERVICE_NAME + value: my-jaeger.kuttl-test-kind-ringtail + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508 + imagePullPolicy: IfNotPresent + name: oauth-proxy + ports: + - containerPort: 8443 + name: public + protocol: TCP + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/tls/private + name: my-jaeger-ui-oauth-proxy-tls + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - args: + - --agent.tags=cluster=undefined,deployment.name=my-jaeger-query,host.ip=${HOST_IP:},pod.name=${POD_NAME:},pod.namespace=kuttl-test-kind-ringtail + - --reporter.grpc.host-port=dns:///my-jaeger-collector-headless.kuttl-test-kind-ringtail.svc:14250 + - --reporter.grpc.tls.ca=/etc/pki/ca-trust/source/service-ca/service-ca.crt + - --reporter.grpc.tls.enabled=true + env: + - name: POD_NAME + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.name + - name: HOST_IP + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: status.hostIP + image: registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-agent + ports: + - containerPort: 5775 + name: zk-compact-trft + protocol: UDP + - containerPort: 5778 + name: config-rest + protocol: TCP + - containerPort: 6831 + name: jg-compact-trft + protocol: UDP + - containerPort: 6832 + name: jg-binary-trft + protocol: UDP + - containerPort: 14271 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + 
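Also visible in the query pod spec above: the UI is never exposed directly. The ose-oauth-proxy sidecar terminates TLS on :8443 and forwards to the UI at localhost:16686, and --openshift-sar makes it admit only callers who pass a SubjectAccessReview. Reformatted for readability, the SAR embedded in that flag is:

# The --openshift-sar value above, as YAML: the proxy lets a user
# through only if the cluster would allow that user to "get" pods in
# the test namespace.
namespace: kuttl-test-kind-ringtail
resource: pods
verb: get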
terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - mountPath: /etc/pki/ca-trust/source/service-ca + name: my-jaeger-service-ca + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: my-jaeger-ui-proxy + serviceAccountName: my-jaeger-ui-proxy + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: ui + path: ui.json + name: my-jaeger-ui-configuration + name: my-jaeger-ui-configuration-volume + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: my-jaeger-trusted-ca + name: my-jaeger-trusted-ca + - name: my-jaeger-ui-oauth-proxy-tls + secret: + defaultMode: 420 + secretName: my-jaeger-ui-oauth-proxy-tls + - name: certs + secret: + defaultMode: 420 + secretName: my-jaeger-jaeger-elasticsearch + - configMap: + defaultMode: 420 + items: + - key: service-ca.crt + path: service-ca.crt + name: my-jaeger-service-ca + name: my-jaeger-service-ca status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:09:18Z" + lastUpdateTime: "2024-09-19T01:09:18Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:09:18Z" + lastUpdateTime: "2024-09-19T01:09:18Z" + message: ReplicaSet "my-jaeger-query-5b4998c474" is progressing. + reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 3 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-kind-ringtail/my-jaeger-query: .status.readyReplicas: key is missing from map logger.go:42: 01:19:14 | collector-otlp-production-http | collector-otlp-production-http events from ns kuttl-test-kind-ringtail: logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:18 +0000 UTC Normal Pod my-jaeger-collector-db956c455-dj9gf Binding Scheduled Successfully assigned kuttl-test-kind-ringtail/my-jaeger-collector-db956c455-dj9gf to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:18 +0000 UTC Normal Pod my-jaeger-collector-db956c455-dj9gf AddedInterface Add eth0 [10.130.0.26/23] from ovn-kubernetes multus logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:18 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-db956c455 SuccessfulCreate Created pod: my-jaeger-collector-db956c455-dj9gf replicaset-controller logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:18 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-db956c455 to 1 deployment-controller logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:18 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj Binding Scheduled Successfully assigned kuttl-test-kind-ringtail/my-jaeger-query-5b4998c474-k6pgj to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:18 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj AddedInterface Add eth0 [10.130.0.27/23] from ovn-kubernetes multus logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:18 +0000 UTC Normal 
ReplicaSet.apps my-jaeger-query-5b4998c474 SuccessfulCreate Created pod: my-jaeger-query-5b4998c474-k6pgj replicaset-controller logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:18 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-5b4998c474 to 1 deployment-controller logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-collector-db956c455-dj9gf.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" already present on machine kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-collector-db956c455-dj9gf.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-collector-db956c455-dj9gf.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" already present on machine kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:19 +0000 UTC Normal Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:20 +0000 UTC Warning Pod 
my-jaeger-collector-db956c455-dj9gf.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:20 +0000 UTC Warning Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{jaeger-query} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:30 +0000 UTC Warning Pod my-jaeger-collector-db956c455-dj9gf.spec.containers{jaeger-collector} BackOff Back-off restarting failed container jaeger-collector in pod my-jaeger-collector-db956c455-dj9gf_kuttl-test-kind-ringtail(0020a710-1b75-4be4-8681-6c4549cc96dd) kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:30 +0000 UTC Warning Pod my-jaeger-query-5b4998c474-k6pgj.spec.containers{jaeger-query} BackOff Back-off restarting failed container jaeger-query in pod my-jaeger-query-5b4998c474-k6pgj_kuttl-test-kind-ringtail(a4cf4caa-e566-4ffe-a10a-200a71a1aa85) kubelet logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:33 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:33 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:19:14 | collector-otlp-production-http | 2024-09-19 01:09:33 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:19:14 | collector-otlp-production-http | Deleting namespace: kuttl-test-kind-ringtail === CONT kuttl/harness/collector-otlp-production-grpc logger.go:42: 01:19:20 | collector-otlp-production-grpc | Creating namespace: kuttl-test-correct-pika logger.go:42: 01:19:20 | collector-otlp-production-grpc/1-install | starting test step 1-install logger.go:42: 01:19:21 | collector-otlp-production-grpc/1-install | Jaeger:kuttl-test-correct-pika/my-jaeger created logger.go:42: 01:29:22 | collector-otlp-production-grpc/1-install | test step failed 1-install case.go:364: failed in step 1-install case.go:366: --- Deployment:kuttl-test-correct-pika/my-jaeger-collector +++ Deployment:kuttl-test-correct-pika/my-jaeger-collector @@ -1,10 +1,412 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-collector + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + 
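The HorizontalPodAutoscaler warnings above are downstream noise rather than the root failure: the operator pairs the collector Deployment with an HPA that scales on CPU and memory utilization, and with the collector container crash-looping the resource metrics API has nothing to report, so both metrics come back invalid ("2 invalid out of 2"). The HPA spec itself is not dumped in this log; a sketch of the assumed shape:

# Assumed shape of the collector HPA behind the FailedGetResourceMetric
# and FailedComputeMetricsReplicas events; only its events, not its
# spec, appear in this log, so the replica bounds and utilization
# targets here are illustrative.
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:
  name: my-jaeger-collector
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: my-jaeger-collector
  minReplicas: 1
  maxReplicas: 100
  metrics:
  - type: Resource
    resource:
      name: cpu
      target:
        type: Utilization
        averageUtilization: 90
  - type: Resource
    resource:
      name: memory
      target:
        type: Utilization
        averageUtilization: 90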
f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"9664276d-9ddc-4456-8eee-3b451c9492da"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-collector"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"COLLECTOR_OTLP_ENABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"COLLECTOR_ZIPKIN_HOST_PORT"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":4317,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":4318,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":9411,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14250,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14267,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14268,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14269,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/jaeger/sampling"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls-config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"my-jaeger-collector-tls-config-volume"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"my-jaeger-sampling-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"my-jaeger-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + manager: jaeger-operator + operation: Update + time: 
"2024-09-19T01:19:26Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:19:26Z" name: my-jaeger-collector namespace: kuttl-test-correct-pika + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: my-jaeger + uid: 9664276d-9ddc-4456-8eee-3b451c9492da spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-collector + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-collector + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --collector.grpc.tls.cert=/etc/tls-config/tls.crt + - --collector.grpc.tls.enabled=true + - --collector.grpc.tls.key=/etc/tls-config/tls.key + - --sampling.strategies-file=/etc/jaeger/sampling/sampling.json + - --es.server-urls=https://elasticsearch.kuttl-test-correct-pika.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: COLLECTOR_ZIPKIN_HOST_PORT + value: :9411 + - name: COLLECTOR_OTLP_ENABLED + value: "true" + image: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-collector + ports: + - containerPort: 9411 + name: zipkin + protocol: TCP + - containerPort: 14267 + name: c-tchan-trft + protocol: TCP + - containerPort: 14268 + name: c-binary-trft + protocol: TCP + - containerPort: 14269 + name: admin-http + protocol: TCP + - containerPort: 14250 + name: grpc + protocol: TCP + - containerPort: 4317 + name: grpc-otlp + protocol: TCP + - containerPort: 4318 + name: http-otlp + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/jaeger/sampling + name: my-jaeger-sampling-configuration-volume + readOnly: true + - mountPath: /etc/tls-config + 
name: my-jaeger-collector-tls-config-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: my-jaeger + serviceAccountName: my-jaeger + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: sampling + path: sampling.json + name: my-jaeger-sampling-configuration + name: my-jaeger-sampling-configuration-volume + - name: my-jaeger-collector-tls-config-volume + secret: + defaultMode: 420 + secretName: my-jaeger-collector-headless-tls + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: my-jaeger-trusted-ca + name: my-jaeger-trusted-ca + - name: certs + secret: + defaultMode: 420 + secretName: my-jaeger-jaeger-elasticsearch status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:19:26Z" + lastUpdateTime: "2024-09-19T01:19:26Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:19:26Z" + lastUpdateTime: "2024-09-19T01:19:26Z" + message: ReplicaSet "my-jaeger-collector-745c6c58f6" is progressing. + reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 1 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-correct-pika/my-jaeger-collector: .status.readyReplicas: key is missing from map case.go:366: --- Deployment:kuttl-test-correct-pika/my-jaeger-query +++ Deployment:kuttl-test-correct-pika/my-jaeger-query @@ -1,10 +1,508 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.jaegertracing.io/inject: my-jaeger + sidecar.jaegertracing.io/revision: "1" + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-query + app.kubernetes.io/part-of: jaeger + sidecar.jaegertracing.io/injected: my-jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.jaegertracing.io/inject: {} + f:sidecar.jaegertracing.io/revision: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"9664276d-9ddc-4456-8eee-3b451c9492da"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:sidecar.jaegertracing.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-query"}: + .: {} + f:args: {} + f:env: + .: {} + 
k:{"name":"JAEGER_DISABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"METRICS_STORAGE_TYPE"}: + .: {} + f:name: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":16685,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16686,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16687,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"name":"oauth-proxy"}: + .: {} + f:args: {} + f:image: {} + f:imagePullPolicy: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":8443,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls/private"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"my-jaeger-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"my-jaeger-ui-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"my-jaeger-ui-oauth-proxy-tls"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T01:24:27Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:24:27Z" name: my-jaeger-query namespace: kuttl-test-correct-pika + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: my-jaeger + uid: 
9664276d-9ddc-4456-8eee-3b451c9492da spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-query + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + sidecar.jaegertracing.io/inject: my-jaeger + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: my-jaeger + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: my-jaeger-query + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --query.ui-config=/etc/config/ui.json + - --es.server-urls=https://elasticsearch.kuttl-test-correct-pika.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: METRICS_STORAGE_TYPE + - name: JAEGER_DISABLED + value: "false" + - name: JAEGER_SERVICE_NAME + value: my-jaeger.kuttl-test-correct-pika + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-query + ports: + - containerPort: 16685 + name: grpc-query + protocol: TCP + - containerPort: 16686 + name: query + protocol: TCP + - containerPort: 16687 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/config + name: my-jaeger-ui-configuration-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + - args: + - --cookie-secret=lvRzqQ8M1xuIiMoSrXvbiu + - --https-address=:8443 + - '--openshift-sar={"namespace": "kuttl-test-correct-pika", "resource": "pods", + "verb": "get"}' + - --openshift-service-account=my-jaeger-ui-proxy + - --provider=openshift + - --tls-cert=/etc/tls/private/tls.crt + - --tls-key=/etc/tls/private/tls.key + - --upstream=http://localhost:16686 + env: + - name: JAEGER_SERVICE_NAME + value: my-jaeger.kuttl-test-correct-pika + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508 + imagePullPolicy: IfNotPresent + name: oauth-proxy + ports: + - containerPort: 8443 + name: public + protocol: TCP + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/tls/private + name: my-jaeger-ui-oauth-proxy-tls + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - args: + - 
--agent.tags=cluster=undefined,deployment.name=my-jaeger-query,host.ip=${HOST_IP:},pod.name=${POD_NAME:},pod.namespace=kuttl-test-correct-pika + - --reporter.grpc.host-port=dns:///my-jaeger-collector-headless.kuttl-test-correct-pika.svc:14250 + - --reporter.grpc.tls.ca=/etc/pki/ca-trust/source/service-ca/service-ca.crt + - --reporter.grpc.tls.enabled=true + env: + - name: POD_NAME + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.name + - name: HOST_IP + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: status.hostIP + image: registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-agent + ports: + - containerPort: 5775 + name: zk-compact-trft + protocol: UDP + - containerPort: 5778 + name: config-rest + protocol: TCP + - containerPort: 6831 + name: jg-compact-trft + protocol: UDP + - containerPort: 6832 + name: jg-binary-trft + protocol: UDP + - containerPort: 14271 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/pki/ca-trust/extracted/pem + name: my-jaeger-trusted-ca + readOnly: true + - mountPath: /etc/pki/ca-trust/source/service-ca + name: my-jaeger-service-ca + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: my-jaeger-ui-proxy + serviceAccountName: my-jaeger-ui-proxy + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: ui + path: ui.json + name: my-jaeger-ui-configuration + name: my-jaeger-ui-configuration-volume + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: my-jaeger-trusted-ca + name: my-jaeger-trusted-ca + - name: my-jaeger-ui-oauth-proxy-tls + secret: + defaultMode: 420 + secretName: my-jaeger-ui-oauth-proxy-tls + - name: certs + secret: + defaultMode: 420 + secretName: my-jaeger-jaeger-elasticsearch + - configMap: + defaultMode: 420 + items: + - key: service-ca.crt + path: service-ca.crt + name: my-jaeger-service-ca + name: my-jaeger-service-ca status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:19:26Z" + lastUpdateTime: "2024-09-19T01:19:26Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:19:26Z" + lastUpdateTime: "2024-09-19T01:19:26Z" + message: ReplicaSet "my-jaeger-query-7c7b7c649b" is progressing. 
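One limitation of this report worth flagging: for each of these ten-minute timeouts the harness prints the Deployment diff and the namespace events, but the BackOff events only prove the collector and query containers keep exiting, not why (the readiness 503s come from the admin ports while the backend is unhealthy, so Elasticsearch connectivity is the first thing to suspect). Recent kuttl releases (0.15+) support collectors on asserts so failing pods' logs are captured in-line with the report; a hedged sketch, since the suite's actual test files are not shown here:

# Hypothetical TestAssert with a log collector: on failure, kuttl would
# dump the logs of pods matching the selector alongside the diff above,
# exposing the actual crash reason behind the BackOff events.
apiVersion: kuttl.dev/v1beta1
kind: TestAssert
timeout: 600
collectors:
- type: pod
  selector: app.kubernetes.io/instance=my-jaeger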
+ reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 3 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-correct-pika/my-jaeger-query: .status.readyReplicas: key is missing from map logger.go:42: 01:29:22 | collector-otlp-production-grpc | collector-otlp-production-grpc events from ns kuttl-test-correct-pika: logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:26 +0000 UTC Normal Pod my-jaeger-collector-745c6c58f6-gftzq Binding Scheduled Successfully assigned kuttl-test-correct-pika/my-jaeger-collector-745c6c58f6-gftzq to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-collector-745c6c58f6 SuccessfulCreate Created pod: my-jaeger-collector-745c6c58f6-gftzq replicaset-controller logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:26 +0000 UTC Normal Deployment.apps my-jaeger-collector ScalingReplicaSet Scaled up replica set my-jaeger-collector-745c6c58f6 to 1 deployment-controller logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:26 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b Binding Scheduled Successfully assigned kuttl-test-correct-pika/my-jaeger-query-7c7b7c649b-4pp2b to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:26 +0000 UTC Normal ReplicaSet.apps my-jaeger-query-7c7b7c649b SuccessfulCreate Created pod: my-jaeger-query-7c7b7c649b-4pp2b replicaset-controller logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:26 +0000 UTC Normal Deployment.apps my-jaeger-query ScalingReplicaSet Scaled up replica set my-jaeger-query-7c7b7c649b to 1 deployment-controller logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-collector-745c6c58f6-gftzq AddedInterface Add eth0 [10.130.0.29/23] from ovn-kubernetes multus logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-collector-745c6c58f6-gftzq.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" already present on machine kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-collector-745c6c58f6-gftzq.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-collector-745c6c58f6-gftzq.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b AddedInterface Add eth0 [10.130.0.30/23] from ovn-kubernetes multus logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" already present on machine kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod 
my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:27 +0000 UTC Normal Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:29 +0000 UTC Warning Pod my-jaeger-collector-745c6c58f6-gftzq.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:29 +0000 UTC Warning Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{jaeger-query} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:39 +0000 UTC Warning Pod my-jaeger-collector-745c6c58f6-gftzq.spec.containers{jaeger-collector} BackOff Back-off restarting failed container jaeger-collector in pod my-jaeger-collector-745c6c58f6-gftzq_kuttl-test-correct-pika(a408f79b-4f86-40fa-ab03-d4f2cfda8f9c) kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:39 +0000 UTC Warning Pod my-jaeger-query-7c7b7c649b-4pp2b.spec.containers{jaeger-query} BackOff Back-off restarting failed container jaeger-query in pod my-jaeger-query-7c7b7c649b-4pp2b_kuttl-test-correct-pika(b0a63a90-5a5c-4bf1-a380-b1a70936d559) kubelet logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics 
for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:29:22 | collector-otlp-production-grpc | 2024-09-19 01:19:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling my-jaeger-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:29:22 | collector-otlp-production-grpc | Deleting namespace: kuttl-test-correct-pika === CONT kuttl/harness/collector-autoscale logger.go:42: 01:29:28 | collector-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 01:29:28 | collector-autoscale | Creating namespace: kuttl-test-climbing-collie logger.go:42: 01:29:28 | collector-autoscale/1-install | starting test step 1-install logger.go:42: 01:29:28 | collector-autoscale/1-install | Jaeger:kuttl-test-climbing-collie/simple-prod created logger.go:42: 01:39:29 | collector-autoscale/1-install | test step failed 1-install case.go:364: failed in step 1-install case.go:366: --- Deployment:kuttl-test-climbing-collie/simple-prod-collector +++ Deployment:kuttl-test-climbing-collie/simple-prod-collector @@ -1,10 +1,418 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-collector + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"e566bbb9-4a4b-4916-a2d4-9893f5f65f75"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-collector"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"COLLECTOR_OTLP_ENABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"COLLECTOR_ZIPKIN_HOST_PORT"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":4317,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":4318,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: 
{} + k:{"containerPort":9411,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14250,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14267,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14268,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14269,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: + .: {} + f:requests: + .: {} + f:memory: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/jaeger/sampling"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls-config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"simple-prod-collector-tls-config-volume"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"simple-prod-sampling-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"simple-prod-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T01:29:32Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:29:32Z" name: simple-prod-collector namespace: kuttl-test-climbing-collie + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: simple-prod + uid: e566bbb9-4a4b-4916-a2d4-9893f5f65f75 spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-collector + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + 
app.kubernetes.io/component: collector + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-collector + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --collector.grpc.tls.cert=/etc/tls-config/tls.crt + - --collector.grpc.tls.enabled=true + - --collector.grpc.tls.key=/etc/tls-config/tls.key + - --sampling.strategies-file=/etc/jaeger/sampling/sampling.json + - --es.server-urls=https://elasticsearch.kuttl-test-climbing-collie.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: COLLECTOR_ZIPKIN_HOST_PORT + value: :9411 + - name: COLLECTOR_OTLP_ENABLED + value: "true" + image: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-collector + ports: + - containerPort: 9411 + name: zipkin + protocol: TCP + - containerPort: 14267 + name: c-tchan-trft + protocol: TCP + - containerPort: 14268 + name: c-binary-trft + protocol: TCP + - containerPort: 14269 + name: admin-http + protocol: TCP + - containerPort: 14250 + name: grpc + protocol: TCP + - containerPort: 4317 + name: grpc-otlp + protocol: TCP + - containerPort: 4318 + name: http-otlp + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: + requests: + memory: 200m + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/jaeger/sampling + name: simple-prod-sampling-configuration-volume + readOnly: true + - mountPath: /etc/tls-config + name: simple-prod-collector-tls-config-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: simple-prod + serviceAccountName: simple-prod + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: sampling + path: sampling.json + name: simple-prod-sampling-configuration + name: simple-prod-sampling-configuration-volume + - name: simple-prod-collector-tls-config-volume + secret: + defaultMode: 420 + secretName: simple-prod-collector-headless-tls + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: simple-prod-trusted-ca + name: simple-prod-trusted-ca + - name: certs + secret: + defaultMode: 420 + secretName: simple-prod-jaeger-elasticsearch status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:29:32Z" + lastUpdateTime: "2024-09-19T01:29:32Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:29:32Z" + lastUpdateTime: "2024-09-19T01:29:32Z" + message: ReplicaSet "simple-prod-collector-5db4cb7df6" is progressing. 
+ reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 1 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-climbing-collie/simple-prod-collector: .status.readyReplicas: key is missing from map case.go:366: --- Deployment:kuttl-test-climbing-collie/simple-prod-query +++ Deployment:kuttl-test-climbing-collie/simple-prod-query @@ -1,10 +1,508 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.jaegertracing.io/inject: simple-prod + sidecar.jaegertracing.io/revision: "1" + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-query + app.kubernetes.io/part-of: jaeger + sidecar.jaegertracing.io/injected: simple-prod + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.jaegertracing.io/inject: {} + f:sidecar.jaegertracing.io/revision: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"e566bbb9-4a4b-4916-a2d4-9893f5f65f75"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:sidecar.jaegertracing.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-query"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"JAEGER_DISABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"METRICS_STORAGE_TYPE"}: + .: {} + f:name: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":16685,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16686,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16687,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} 
+ k:{"name":"oauth-proxy"}: + .: {} + f:args: {} + f:image: {} + f:imagePullPolicy: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":8443,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls/private"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"simple-prod-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"simple-prod-ui-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"simple-prod-ui-oauth-proxy-tls"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T01:34:33Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:34:33Z" name: simple-prod-query namespace: kuttl-test-climbing-collie + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: simple-prod + uid: e566bbb9-4a4b-4916-a2d4-9893f5f65f75 spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-query + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + sidecar.jaegertracing.io/inject: simple-prod + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-query + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --query.ui-config=/etc/config/ui.json + - --es.server-urls=https://elasticsearch.kuttl-test-climbing-collie.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: METRICS_STORAGE_TYPE + - name: JAEGER_DISABLED + value: "false" + - name: JAEGER_SERVICE_NAME + value: 
simple-prod.kuttl-test-climbing-collie + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-query + ports: + - containerPort: 16685 + name: grpc-query + protocol: TCP + - containerPort: 16686 + name: query + protocol: TCP + - containerPort: 16687 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/config + name: simple-prod-ui-configuration-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + - args: + - --cookie-secret=RYthamLSe0Eaw2mstMzrHB + - --https-address=:8443 + - '--openshift-sar={"namespace": "kuttl-test-climbing-collie", "resource": + "pods", "verb": "get"}' + - --openshift-service-account=simple-prod-ui-proxy + - --provider=openshift + - --tls-cert=/etc/tls/private/tls.crt + - --tls-key=/etc/tls/private/tls.key + - --upstream=http://localhost:16686 + env: + - name: JAEGER_SERVICE_NAME + value: simple-prod.kuttl-test-climbing-collie + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508 + imagePullPolicy: IfNotPresent + name: oauth-proxy + ports: + - containerPort: 8443 + name: public + protocol: TCP + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/tls/private + name: simple-prod-ui-oauth-proxy-tls + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - args: + - --agent.tags=cluster=undefined,deployment.name=simple-prod-query,host.ip=${HOST_IP:},pod.name=${POD_NAME:},pod.namespace=kuttl-test-climbing-collie + - --reporter.grpc.host-port=dns:///simple-prod-collector-headless.kuttl-test-climbing-collie.svc:14250 + - --reporter.grpc.tls.ca=/etc/pki/ca-trust/source/service-ca/service-ca.crt + - --reporter.grpc.tls.enabled=true + env: + - name: POD_NAME + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.name + - name: HOST_IP + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: status.hostIP + image: registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-agent + ports: + - containerPort: 5775 + name: zk-compact-trft + protocol: UDP + - containerPort: 5778 + name: config-rest + protocol: TCP + - containerPort: 6831 + name: jg-compact-trft + protocol: UDP + - containerPort: 6832 + name: jg-binary-trft + protocol: UDP + - containerPort: 14271 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 1 + 
periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - mountPath: /etc/pki/ca-trust/source/service-ca + name: simple-prod-service-ca + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: simple-prod-ui-proxy + serviceAccountName: simple-prod-ui-proxy + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: ui + path: ui.json + name: simple-prod-ui-configuration + name: simple-prod-ui-configuration-volume + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: simple-prod-trusted-ca + name: simple-prod-trusted-ca + - name: simple-prod-ui-oauth-proxy-tls + secret: + defaultMode: 420 + secretName: simple-prod-ui-oauth-proxy-tls + - name: certs + secret: + defaultMode: 420 + secretName: simple-prod-jaeger-elasticsearch + - configMap: + defaultMode: 420 + items: + - key: service-ca.crt + path: service-ca.crt + name: simple-prod-service-ca + name: simple-prod-service-ca status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:29:32Z" + lastUpdateTime: "2024-09-19T01:29:32Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:29:32Z" + lastUpdateTime: "2024-09-19T01:29:32Z" + message: ReplicaSet "simple-prod-query-6c6568f778" is progressing. + reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 3 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-climbing-collie/simple-prod-query: .status.readyReplicas: key is missing from map logger.go:42: 01:39:29 | collector-autoscale | collector-autoscale events from ns kuttl-test-climbing-collie: logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:32 +0000 UTC Normal Pod simple-prod-collector-5db4cb7df6-kchm9 Binding Scheduled Successfully assigned kuttl-test-climbing-collie/simple-prod-collector-5db4cb7df6-kchm9 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:32 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-5db4cb7df6 SuccessfulCreate Created pod: simple-prod-collector-5db4cb7df6-kchm9 replicaset-controller logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:32 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-5db4cb7df6 to 1 deployment-controller logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:32 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq Binding Scheduled Successfully assigned kuttl-test-climbing-collie/simple-prod-query-6c6568f778-hjhfq to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:32 +0000 UTC Normal ReplicaSet.apps simple-prod-query-6c6568f778 SuccessfulCreate Created pod: simple-prod-query-6c6568f778-hjhfq replicaset-controller logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:32 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-6c6568f778 to 1 deployment-controller 
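Note: the production-mode failures in this suite share one signature. kuttl polls until the live object contains every field listed in the step's assert file, and these steps assert status.readyReplicas: 1 on the collector and query Deployments; because both pods crash-loop while Elasticsearch is unreachable (the events below show readiness probes failing with 503 and BackOff), that status field never appears and the step times out with ".status.readyReplicas: key is missing from map". A minimal sketch of the assert shape implied by the diffs above, written as a shell heredoc so it can be reproduced directly; the file name is illustrative, and the real assert files are rendered from templates and may carry more fields:

cat > assert-deployments-ready.yaml <<'EOF'
# kuttl treats each document as a partial object: the step passes only once
# the live Deployment is a superset of this, including the status field.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: simple-prod-collector
status:
  readyReplicas: 1
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: simple-prod-query
status:
  readyReplicas: 1
EOF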
logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-collector-5db4cb7df6-kchm9 AddedInterface Add eth0 [10.130.0.33/23] from ovn-kubernetes multus logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-collector-5db4cb7df6-kchm9.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" already present on machine kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-collector-5db4cb7df6-kchm9.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-collector-5db4cb7df6-kchm9.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq AddedInterface Add eth0 [10.130.0.32/23] from ovn-kubernetes multus logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" already present on machine kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:33 +0000 UTC Normal Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:34 +0000 UTC Warning Pod simple-prod-collector-5db4cb7df6-kchm9.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed 
with statuscode: 503 kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:34 +0000 UTC Warning Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{jaeger-query} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:44 +0000 UTC Warning Pod simple-prod-collector-5db4cb7df6-kchm9.spec.containers{jaeger-collector} BackOff Back-off restarting failed container jaeger-collector in pod simple-prod-collector-5db4cb7df6-kchm9_kuttl-test-climbing-collie(fdbf0ca9-0961-4067-9f8a-6947981778bf) kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:44 +0000 UTC Warning Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{jaeger-query} BackOff Back-off restarting failed container jaeger-query in pod simple-prod-query-6c6568f778-hjhfq_kuttl-test-climbing-collie(9848ca15-72aa-4641-ad29-f4dc69f9dc7a) kubelet logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:29:47 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:39:29 | collector-autoscale | 2024-09-19 01:30:03 +0000 UTC Warning Pod simple-prod-query-6c6568f778-hjhfq.spec.containers{jaeger-query} Unhealthy Liveness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:39:29 | collector-autoscale | Deleting namespace: kuttl-test-climbing-collie === CONT kuttl/harness/collector-otlp-allinone-grpc logger.go:42: 01:39:35 | collector-otlp-allinone-grpc | Creating namespace: kuttl-test-able-ox logger.go:42: 01:39:35 | collector-otlp-allinone-grpc/0-install | starting test step 0-install logger.go:42: 01:39:36 | collector-otlp-allinone-grpc/0-install | Jaeger:kuttl-test-able-ox/my-jaeger created logger.go:42: 01:39:39 | collector-otlp-allinone-grpc/0-install | test step completed 0-install logger.go:42: 01:39:39 | collector-otlp-allinone-grpc/1-smoke-test | starting test step 1-smoke-test logger.go:42: 01:39:39 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null] logger.go:42: 01:39:41 | collector-otlp-allinone-grpc/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
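Note: the smoke-test step that follows is entirely data-driven: the endpoints and assert image are handed to gomplate as environment variables, the template renders a report-span Job and a check-span Job (from the job names, one pushes spans over OTLP and the other verifies them through the query endpoint), and the step creates both and waits for completion. Condensed from the commands in the records below:

REPORTING_PROTOCOL=grpc \
ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest \
OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 \
JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 \
MOUNT_SECRET=e2e-test \
/tmp/jaeger-tests/bin/gomplate \
  -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template \
  -o otlp-smoke-test-job.yaml
kubectl create -f otlp-smoke-test-job.yaml -n "$NAMESPACE"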
logger.go:42: 01:39:50 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c REPORTING_PROTOCOL=grpc ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest OTEL_EXPORTER_OTLP_ENDPOINT=http://my-jaeger-collector-headless:4317 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/otlp-smoke-test.yaml.template -o otlp-smoke-test-job.yaml] logger.go:42: 01:39:50 | collector-otlp-allinone-grpc/1-smoke-test | running command: [sh -c kubectl create -f otlp-smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 01:39:51 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/report-span created logger.go:42: 01:39:51 | collector-otlp-allinone-grpc/1-smoke-test | job.batch/check-span created logger.go:42: 01:40:11 | collector-otlp-allinone-grpc/1-smoke-test | test step completed 1-smoke-test logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | collector-otlp-allinone-grpc events from ns kuttl-test-able-ox: logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:36 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc Binding Scheduled Successfully assigned kuttl-test-able-ox/my-jaeger-7dfc86957-vlpmc to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:36 +0000 UTC Normal ReplicaSet.apps my-jaeger-7dfc86957 SuccessfulCreate Created pod: my-jaeger-7dfc86957-vlpmc replicaset-controller logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:36 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-7dfc86957 to 1 deployment-controller logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:37 +0000 UTC Warning Pod my-jaeger-7dfc86957-vlpmc FailedMount MountVolume.SetUp failed for volume "my-jaeger-ui-oauth-proxy-tls" : secret "my-jaeger-ui-oauth-proxy-tls" not found kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:37 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc AddedInterface Add eth0 [10.130.0.35/23] from ovn-kubernetes multus logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:37 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:37 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:37 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:38 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:38 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:38 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc.spec.containers{oauth-proxy} Started Started 
container oauth-proxy kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:43 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:43 +0000 UTC Normal Pod my-jaeger-7dfc86957-vlpmc.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:43 +0000 UTC Normal ReplicaSet.apps my-jaeger-7dfc86957 SuccessfulDelete Deleted pod: my-jaeger-7dfc86957-vlpmc replicaset-controller logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:43 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-7dfc86957 to 0 from 1 deployment-controller logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:44 +0000 UTC Normal Pod my-jaeger-5c69cd49bb-7875n Binding Scheduled Successfully assigned kuttl-test-able-ox/my-jaeger-5c69cd49bb-7875n to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:44 +0000 UTC Normal Pod my-jaeger-5c69cd49bb-7875n AddedInterface Add eth0 [10.130.0.36/23] from ovn-kubernetes multus logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:44 +0000 UTC Normal Pod my-jaeger-5c69cd49bb-7875n.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:44 +0000 UTC Normal Pod my-jaeger-5c69cd49bb-7875n.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:44 +0000 UTC Normal Pod my-jaeger-5c69cd49bb-7875n.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:44 +0000 UTC Normal Pod my-jaeger-5c69cd49bb-7875n.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:44 +0000 UTC Normal ReplicaSet.apps my-jaeger-5c69cd49bb SuccessfulCreate Created pod: my-jaeger-5c69cd49bb-7875n replicaset-controller logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:44 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-5c69cd49bb to 1 deployment-controller logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:45 +0000 UTC Normal Pod my-jaeger-5c69cd49bb-7875n.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:45 +0000 UTC Normal Pod my-jaeger-5c69cd49bb-7875n.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:51 +0000 UTC Normal Pod check-span-x7nq7 Binding Scheduled Successfully assigned kuttl-test-able-ox/check-span-x7nq7 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:51 +0000 UTC Normal Pod check-span-x7nq7 AddedInterface Add eth0 
[10.130.0.38/23] from ovn-kubernetes multus logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:51 +0000 UTC Normal Pod check-span-x7nq7.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:51 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-x7nq7 job-controller logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:51 +0000 UTC Normal Pod report-span-hvfjn Binding Scheduled Successfully assigned kuttl-test-able-ox/report-span-hvfjn to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:51 +0000 UTC Normal Pod report-span-hvfjn AddedInterface Add eth0 [10.130.0.37/23] from ovn-kubernetes multus logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:51 +0000 UTC Normal Pod report-span-hvfjn.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:51 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-hvfjn job-controller logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:52 +0000 UTC Normal Pod check-span-x7nq7.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 805ms (805ms including waiting) kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:52 +0000 UTC Normal Pod check-span-x7nq7.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:52 +0000 UTC Normal Pod check-span-x7nq7.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:52 +0000 UTC Normal Pod report-span-hvfjn.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 869ms (869ms including waiting) kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:52 +0000 UTC Normal Pod report-span-hvfjn.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:39:52 +0000 UTC Normal Pod report-span-hvfjn.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | 2024-09-19 01:40:11 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 01:40:11 | collector-otlp-allinone-grpc | Deleting namespace: kuttl-test-able-ox === CONT kuttl/harness/cassandra-spark logger.go:42: 01:40:23 | cassandra-spark | Ignoring 01-assert.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 01:40:23 | cassandra-spark | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 01:40:23 | cassandra-spark | Creating namespace: kuttl-test-firm-piglet logger.go:42: 01:40:23 | cassandra-spark | cassandra-spark events from ns kuttl-test-firm-piglet: logger.go:42: 01:40:23 | cassandra-spark | Deleting namespace: kuttl-test-firm-piglet === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing 
temp folder: "" --- FAIL: kuttl (2538.06s) --- FAIL: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.09s) --- PASS: kuttl/harness/collector-otlp-allinone-http (47.76s) --- FAIL: kuttl/harness/set-custom-img (607.75s) --- FAIL: kuttl/harness/collector-otlp-production-http (606.81s) --- FAIL: kuttl/harness/collector-otlp-production-grpc (607.77s) --- FAIL: kuttl/harness/collector-autoscale (607.21s) --- PASS: kuttl/harness/collector-otlp-allinone-grpc (48.08s) --- PASS: kuttl/harness/cassandra-spark (6.00s) FAIL + exit_code=1 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name miscellaneous --report --output /logs/artifacts/miscellaneous.xml ./artifacts/kuttl-report.xml time="2024-09-19T01:40:31Z" level=debug msg="Setting a new name for the test suites" time="2024-09-19T01:40:31Z" level=debug msg="Removing 'artifacts' TestCase" time="2024-09-19T01:40:31Z" level=debug msg="normalizing test case names" time="2024-09-19T01:40:31Z" level=debug msg="miscellaneous/artifacts -> miscellaneous_artifacts" time="2024-09-19T01:40:31Z" level=debug msg="miscellaneous/collector-otlp-allinone-http -> miscellaneous_collector_otlp_allinone_http" time="2024-09-19T01:40:31Z" level=debug msg="miscellaneous/set-custom-img -> miscellaneous_set_custom_img" time="2024-09-19T01:40:31Z" level=debug msg="miscellaneous/collector-otlp-production-http -> miscellaneous_collector_otlp_production_http" time="2024-09-19T01:40:31Z" level=debug msg="miscellaneous/collector-otlp-production-grpc -> miscellaneous_collector_otlp_production_grpc" time="2024-09-19T01:40:31Z" level=debug msg="miscellaneous/collector-autoscale -> miscellaneous_collector_autoscale" time="2024-09-19T01:40:31Z" level=debug msg="miscellaneous/collector-otlp-allinone-grpc -> miscellaneous_collector_otlp_allinone_grpc" time="2024-09-19T01:40:31Z" level=debug msg="miscellaneous/cassandra-spark -> miscellaneous_cassandra_spark" +----------------------------------------------+--------+ | NAME | RESULT | +----------------------------------------------+--------+ | miscellaneous_artifacts | passed | | miscellaneous_collector_otlp_allinone_http | passed | | miscellaneous_set_custom_img | failed | | miscellaneous_collector_otlp_production_http | failed | | miscellaneous_collector_otlp_production_grpc | failed | | miscellaneous_collector_autoscale | failed | | miscellaneous_collector_otlp_allinone_grpc | passed | | miscellaneous_cassandra_spark | passed | +----------------------------------------------+--------+ + '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh elasticsearch false true + '[' 3 -ne 3 ']' + test_suite_name=elasticsearch + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. 
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/elasticsearch.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-elasticsearch make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true \ KAFKA_VERSION=3.6.0 \ SKIP_KAFKA=false \ ./tests/e2e/elasticsearch/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 55m Cluster version is 4.16.11' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 55m Cluster version is 4.16.11' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/elasticsearch/render.sh ++ export SUITE_DIR=./tests/e2e/elasticsearch ++ SUITE_DIR=./tests/e2e/elasticsearch ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
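Note: the KAFKA_USE_CUSTOM_PODSET decision above reduces to a version comparison built on sort -V. Reconstructed from the trace (this is the apparent behavior, not the script verbatim), version_le succeeds when its first argument sorts as the smaller version; here version_le 3.6.0 0.25.0 fails because head -n 1 yields 0.25.0 rather than 3.6.0, meaning the Kafka version is newer than 0.25.0, so KAFKA_USE_CUSTOM_PODSET=true is taken:

version_le() {
  # true when $1 <= $2: the version-sorted minimum of the pair must equal $1
  test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
}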
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/elasticsearch ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + is_secured=false + '[' true = true ']' + is_secured=true + start_test es-from-aio-to-production + '[' 1 -ne 1 ']' + test_name=es-from-aio-to-production + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-from-aio-to-production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-from-aio-to-production\e[0m' Rendering files for test es-from-aio-to-production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-from-aio-to-production + cd es-from-aio-to-production + jaeger_name=my-jaeger + render_install_jaeger my-jaeger allInOne 00 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=allInOne + test_step=00 + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export 
JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + jaeger_deploy_mode=production + [[ true = true ]] + [[ true = true ]] + jaeger_deploy_mode=production_autoprovisioned + render_install_jaeger my-jaeger production_autoprovisioned 03 + '[' 3 -ne 3 ']' + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + deploy_mode=production_autoprovisioned + test_step=03 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./03-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./03-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch.redundancyPolicy="ZeroRedundancy"' ./03-install.yaml + render_smoke_test my-jaeger true 04 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=04 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./04-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./04-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test es-increasing-replicas + '[' 1 -ne 1 ']' + test_name=es-increasing-replicas + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-increasing-replicas' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-increasing-replicas\e[0m' Rendering files for test es-increasing-replicas + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-from-aio-to-production + '[' es-from-aio-to-production '!=' _build ']' + cd .. 
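Note: render_smoke_test is expanded twice above (steps 01 and 04) and always takes the secured branch on this cluster. A hypothetical reconstruction of the helper from its trace; the unsecured defaults (http:// and query port 16686) are assumptions, since the log only exercises the OpenShift path:

    render_smoke_test() {
        local jaeger=$1 is_secured=$2 test_step=$3
        # Assumed defaults for non-OpenShift clusters (not shown in the log).
        local protocol=http:// query_port=:16686
        local template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
        if [ "$is_secured" = true ]; then
            protocol=https://
            query_port=:443
            template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
        fi
        export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
        export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
        export JAEGER_NAME=$jaeger
        # gomplate substitutes the exported variables into the kuttl step files.
        /tmp/jaeger-tests/bin/gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
        /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o "./${test_step}-assert.yaml"
        unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }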
+ mkdir -p es-increasing-replicas + cd es-increasing-replicas + jaeger_name=simple-prod + '[' true = true ']' + jaeger_deployment_mode=production_autoprovisioned + render_install_jaeger simple-prod production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + cp ./01-install.yaml ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.collector.replicas=2 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.query.replicas=2 ./02-install.yaml + cp ./01-assert.yaml ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.replicas=2 ./02-assert.yaml + /tmp/jaeger-tests/bin/yq e -i .status.readyReplicas=2 ./02-assert.yaml + render_smoke_test simple-prod true 03 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=03 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./03-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + cp ./02-install.yaml ./04-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.elasticsearch.nodeCount=2 ./04-install.yaml + /tmp/jaeger-tests/bin/gomplate -f ./openshift-check-es-nodes.yaml.template -o ./05-check-es-nodes.yaml + '[' true = true ']' + skip_test es-index-cleaner-upstream 'SKIP_ES_EXTERNAL is true' + '[' 2 -ne 2 ']' + test_name=es-index-cleaner-upstream + message='SKIP_ES_EXTERNAL is true' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build/es-increasing-replicas + '[' es-increasing-replicas '!=' _build ']' + cd .. 
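Note: es-increasing-replicas never re-renders templates for the scale-up step; it derives step 02 by copying the step-01 files and patching them in place. The same yq calls as in the trace, grouped and annotated:

    # Install step: ask for two collector and two query replicas.
    cp ./01-install.yaml ./02-install.yaml
    /tmp/jaeger-tests/bin/yq e -i '.spec.collector.replicas=2' ./02-install.yaml
    /tmp/jaeger-tests/bin/yq e -i '.spec.query.replicas=2' ./02-install.yaml

    # Assert step: kuttl now waits for the asserted Deployments to report
    # both .spec.replicas=2 and .status.readyReplicas=2.
    cp ./01-assert.yaml ./02-assert.yaml
    /tmp/jaeger-tests/bin/yq e -i '.spec.replicas=2' ./02-assert.yaml
    /tmp/jaeger-tests/bin/yq e -i '.status.readyReplicas=2' ./02-assert.yaml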
+ rm -rf es-index-cleaner-upstream + warning 'es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true\e[0m' WAR: es-index-cleaner-upstream: SKIP_ES_EXTERNAL is true + '[' true = true ']' + es_index_cleaner -autoprov production_autoprovisioned + '[' 2 -ne 2 ']' + postfix=-autoprov + jaeger_deployment_strategy=production_autoprovisioned + start_test es-index-cleaner-autoprov + '[' 1 -ne 1 ']' + test_name=es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== + info 'Rendering files for test es-index-cleaner-autoprov' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test es-index-cleaner-autoprov\e[0m' Rendering files for test es-index-cleaner-autoprov + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/elasticsearch/_build + '[' _build '!=' _build ']' + mkdir -p es-index-cleaner-autoprov + cd es-index-cleaner-autoprov + jaeger_name=test-es-index-cleaner-with-prefix + cronjob_name=test-es-index-cleaner-with-prefix-es-index-cleaner + secured_es_connection=false + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_managed_es ']' + ELASTICSEARCH_URL=https://elasticsearch + secured_es_connection=true + cp ../../es-index-cleaner-upstream/04-assert.yaml ../../es-index-cleaner-upstream/README.md . + render_install_jaeger test-es-index-cleaner-with-prefix production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options.es.index-prefix=""' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.enabled=false ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.storage.esIndexCleaner.numberOfDays=0 ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.esIndexCleaner.schedule="*/1 * * * *"' ./01-install.yaml + render_report_spans test-es-index-cleaner-with-prefix true 5 00 true 02 + '[' 6 -ne 6 ']' + jaeger=test-es-index-cleaner-with-prefix + is_secured=true + number_of_spans=5 + job_number=00 + ensure_reported_spans=true + test_step=02 + export JAEGER_NAME=test-es-index-cleaner-with-prefix + JAEGER_NAME=test-es-index-cleaner-with-prefix + export JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://test-es-index-cleaner-with-prefix-collector-headless:14268 + export JOB_NUMBER=00 + JOB_NUMBER=00 + export DAYS=5 + DAYS=5 + '[' true = true ']' + protocol=https:// + query_port= + template=/tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template + 
'[' true = true ']' + export ENSURE_REPORTED_SPANS=true + ENSURE_REPORTED_SPANS=true + export JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + JAEGER_QUERY_ENDPOINT=https://test-es-index-cleaner-with-prefix-query + params= + '[' true = true ']' + '[' true = true ']' + '[' '' '!=' allInOne ']' + params='-t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/report-spans.yaml.template -t /tmp/jaeger-tests/tests/templates/openshift/configure-api-query-oauth.yaml.template -o ./02-report-spans.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-report-spans.yaml.template -o ./02-assert.yaml + unset JAEGER_COLLECTOR_ENDPOINT + unset JAEGER_QUERY_ENDPOINT + unset JOB_NUMBER + unset DAYS + unset ENSURE_REPORTED_SPANS + sed 's~enabled: false~enabled: true~gi' ./01-install.yaml + CRONJOB_NAME=test-es-index-cleaner-with-prefix-es-index-cleaner + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/wait-for-cronjob-execution.yaml.template -o ./04-wait-es-index-cleaner.yaml + /tmp/jaeger-tests/bin/gomplate -f ./01-install.yaml -o ./05-install.yaml + render_check_indices true ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' 00 06 + '[' 4 -ne 4 ']' + secured=true + cmd_parameters=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + job_number=00 + test_step=06 + escape_command ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + '[' 1 -ne 1 ']' + command=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ echo ''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' ++ sed 's/\\/\\\\/g' + export 'CMD_PARAMETERS='\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\\d{4}-\\d{2}-\\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + mount_secret= + '[' true = true ']' + '[' true = true ']' + mount_secret=test-es-index-cleaner-with-prefix-curator + JOB_NUMBER=00 + CMD_PARAMETERS=''\''--pattern'\'', '\''jaeger-span-\d{4}-\d{2}-\d{2}'\'', '\''--assert-count-indices'\'', '\''0'\'',' + MOUNT_SECRET=test-es-index-cleaner-with-prefix-curator + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/check-indices.yaml.template -o ./06-check-indices.yaml + JOB_NUMBER=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-check-indices.yaml.template -o ./06-assert.yaml + '[' true = true ']' + get_elasticsearch_openshift_operator_version + export ESO_OPERATOR_VERSION + '[' true = true ']' ++ kubectl get pods -l name=elasticsearch-operator --all-namespaces '-o=jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}' error: error executing jsonpath "{.items[0].metadata.annotations.operatorframework\\.io/properties}": Error executing template: array index out of bounds: index 0, length 0. 
Printing more information for debugging the template:
template was:
	{.items[0].metadata.annotations.operatorframework\.io/properties}
object given to jsonpath engine was:
	map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}}
+ properties=
make[2]: *** [tests/e2e/elasticsearch/Makefile:2: render-e2e-tests-elasticsearch] Error 1
make[2]: Leaving directory '/tmp/jaeger-tests'
make[1]: *** [tests/e2e/elasticsearch/Makefile:8: run-e2e-tests-elasticsearch] Error 2
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh examples false true + '[' 3 -ne 3 ']' + test_suite_name=examples + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/examples.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-examples make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true KAFKA_VERSION=3.6.0 \ SKIP_KAFKA=false \ VERTX_IMG=jaegertracing/vertx-create-span:operator-e2e-tests \ ./tests/e2e/examples/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 55m Cluster version is 4.16.11' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 55m Cluster version is 4.16.11' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/examples/render.sh ++ export SUITE_DIR=./tests/e2e/examples ++ SUITE_DIR=./tests/e2e/examples ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../..
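Note: the elasticsearch suite render above dies inside get_elasticsearch_openshift_operator_version: the jsonpath expression indexes .items[0], but the label selector matched no pods ("array index out of bounds: index 0, length 0"), i.e. no elasticsearch-operator appears to be installed in any namespace. A defensive variant of that lookup -- hypothetical, not what the harness does -- would check for a match before indexing:

    # List matching pod names first; an empty result means no operator pod.
    pods=$(kubectl get pods -l name=elasticsearch-operator --all-namespaces \
        -o jsonpath='{.items[*].metadata.name}')
    if [ -z "$pods" ]; then
        echo "no elasticsearch-operator pod found; cannot detect its version" >&2
    else
        kubectl get pods -l name=elasticsearch-operator --all-namespaces \
            -o 'jsonpath={.items[0].metadata.annotations.operatorframework\.io/properties}'
    fi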
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/examples ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test examples-agent-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-agent-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-with-priority-class\e[0m' Rendering files for test examples-agent-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + mkdir -p examples-agent-with-priority-class + cd examples-agent-with-priority-class + example_name=agent-with-priority-class + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + '[' true '!=' true ']' + render_install_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + install_file=./02-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/agent-with-priority-class.yaml -o ./02-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./02-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./02-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./02-install.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + JAEGER_NAME=agent-as-daemonset + local jaeger_strategy ++ get_jaeger_strategy ./02-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./02-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./02-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./02-install.yaml ++ strategy=DaemonSet ++ '[' DaemonSet = null ']' ++ echo DaemonSet ++ return 0 + jaeger_strategy=DaemonSet + '[' DaemonSet = DaemonSet ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./02-assert.yaml + render_smoke_test_example agent-with-priority-class 02 + '[' 2 -ne 2 ']' + example_name=agent-with-priority-class + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/agent-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/agent-with-priority-class.yaml ++ jaeger_name=agent-as-daemonset ++ '[' -z agent-as-daemonset ']' ++ echo agent-as-daemonset ++ return 0 + jaeger_name=agent-as-daemonset + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test agent-as-daemonset true 02 + '[' 3 -ne 3 ']' + jaeger=agent-as-daemonset + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 + export JAEGER_NAME=agent-as-daemonset + JAEGER_NAME=agent-as-daemonset + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-all-in-one-with-options + '[' 1 -ne 1 ']' + test_name=examples-all-in-one-with-options + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-all-in-one-with-options' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-all-in-one-with-options\e[0m' Rendering files for test examples-all-in-one-with-options + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-with-priority-class + '[' examples-agent-with-priority-class '!=' _build ']' + cd .. 
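Note: get_jaeger_strategy, expanded twice in the block above, decides which assert template a rendered example gets. A bash sketch reconstructed from its trace (only the expansions appear in the log; the body is inferred):

    get_jaeger_strategy() {
        local deployment_file=$1
        local strategy
        # Explicit production/streaming strategies win outright.
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' "$deployment_file")
        if [ "$strategy" = production ] || [ "$strategy" = streaming ]; then
            echo "$strategy"
            return 0
        fi
        # Otherwise fall back to the agent strategy (DaemonSet for the
        # agent-with-priority-class example), defaulting to allInOne.
        strategy=$(/tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' "$deployment_file")
        if [ "$strategy" = null ]; then
            echo allInOne
        else
            echo "$strategy"
        fi
    }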
+ mkdir -p examples-all-in-one-with-options + cd examples-all-in-one-with-options + example_name=all-in-one-with-options + render_install_example all-in-one-with-options 00 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/all-in-one-with-options.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + JAEGER_NAME=my-jaeger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + /tmp/jaeger-tests/bin/yq e -i '.metadata.name="my-jaeger"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i 'del(.spec.allInOne.image)' ./00-install.yaml + render_smoke_test_example all-in-one-with-options 01 + '[' 2 -ne 2 ']' + example_name=all-in-one-with-options + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/all-in-one-with-options.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/all-in-one-with-options.yaml ++ jaeger_name=my-jaeger ++ '[' -z my-jaeger ']' ++ echo my-jaeger ++ return 0 + jaeger_name=my-jaeger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test my-jaeger true 01 + '[' 3 -ne 3 ']' + jaeger=my-jaeger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 + export JAEGER_NAME=my-jaeger + JAEGER_NAME=my-jaeger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' true = true ']' + sed -i s~my-jaeger-query:443~my-jaeger-query:443/jaeger~gi ./01-smoke-test.yaml + start_test examples-business-application-injected-sidecar + '[' 1 -ne 1 ']' + test_name=examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-business-application-injected-sidecar' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-business-application-injected-sidecar\e[0m' Rendering files for test examples-business-application-injected-sidecar + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-all-in-one-with-options + '[' examples-all-in-one-with-options '!=' _build ']' + cd .. 
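Note: the trailing sed on ./01-smoke-test.yaml exists because this example serves the query API under a base path, so the rendered smoke test must hit .../jaeger instead of the root. The trace hard-codes the rewrite; a hypothetical generalization that reads the base path out of the install file instead (the yq path, and the assumption that the example sets query base-path=/jaeger, are not shown in the log):

    # Pull the base path from the all-in-one options, if any.
    base_path=$(/tmp/jaeger-tests/bin/yq e '.spec.allInOne.options.query."base-path" // ""' ./00-install.yaml)
    if [ -n "$base_path" ]; then
        sed -i "s~my-jaeger-query:443~my-jaeger-query:443${base_path}~gi" ./01-smoke-test.yaml
    fi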
+ mkdir -p examples-business-application-injected-sidecar + cd examples-business-application-injected-sidecar + example_name=simplest + cp /tmp/jaeger-tests/examples/business-application-injected-sidecar.yaml ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].image=strenv(VERTX_IMG)' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.path="/"' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.httpGet.port=8080' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.initialDelaySeconds=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.failureThreshold=3' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.periodSeconds=10' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.successThreshold=1' ./00-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.template.spec.containers[0].livenessProbe.timeoutSeconds=1' ./00-install.yaml + render_install_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example simplest 02 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 02 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-collector-with-priority-class + '[' 1 -ne 1 ']' + test_name=examples-collector-with-priority-class + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-collector-with-priority-class' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-collector-with-priority-class\e[0m' Rendering files for test examples-collector-with-priority-class + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-business-application-injected-sidecar + '[' examples-business-application-injected-sidecar '!=' _build ']' + cd .. + mkdir -p examples-collector-with-priority-class + cd examples-collector-with-priority-class + example_name=collector-with-priority-class + render_install_example collector-with-priority-class 00 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/collector-with-priority-class.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + JAEGER_NAME=collector-with-high-priority + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example collector-with-priority-class 01 + '[' 2 -ne 2 ']' + example_name=collector-with-priority-class + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/collector-with-priority-class.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/collector-with-priority-class.yaml ++ jaeger_name=collector-with-high-priority ++ '[' -z collector-with-high-priority ']' ++ echo collector-with-high-priority ++ return 0 + jaeger_name=collector-with-high-priority + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test collector-with-high-priority true 01 + '[' 3 -ne 3 ']' + jaeger=collector-with-high-priority + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + JAEGER_QUERY_ENDPOINT=https://collector-with-high-priority-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://collector-with-high-priority-collector-headless:14268 + export JAEGER_NAME=collector-with-high-priority + JAEGER_NAME=collector-with-high-priority + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-service-types + '[' 1 -ne 1 ']' + test_name=examples-service-types + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-service-types' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-service-types\e[0m' Rendering files for test examples-service-types + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-collector-with-priority-class + '[' examples-collector-with-priority-class '!=' _build ']' + cd .. 
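Note: render_smoke_test_example is a thin wrapper; per its trace it resolves the Jaeger name from the stock example manifest and then delegates to render_smoke_test, marking the test secured whenever the cluster is OpenShift. A reconstruction under those assumptions:

    render_smoke_test_example() {
        local example_name=$1 test_step=$2
        local deployment_file=/tmp/jaeger-tests/examples/${example_name}.yaml
        local jaeger_name
        jaeger_name=$(get_jaeger_name "$deployment_file")
        local is_secured=false
        # On OpenShift the query endpoint is TLS-terminated, so the smoke
        # test must use the secured variant.
        if [ "$IS_OPENSHIFT" = true ]; then
            is_secured=true
        fi
        render_smoke_test "$jaeger_name" "$is_secured" "$test_step"
    }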
+ mkdir -p examples-service-types + cd examples-service-types + example_name=service-types + render_install_example service-types 00 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/service-types.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + JAEGER_NAME=service-types + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example service-types 01 + '[' 2 -ne 2 ']' + example_name=service-types + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/service-types.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/service-types.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/service-types.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/service-types.yaml ++ jaeger_name=service-types ++ '[' -z service-types ']' ++ echo service-types ++ return 0 + jaeger_name=service-types + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test service-types true 01 + '[' 3 -ne 3 ']' + jaeger=service-types + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + JAEGER_QUERY_ENDPOINT=https://service-types-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 + export JAEGER_NAME=service-types + JAEGER_NAME=service-types + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod + '[' 1 -ne 1 ']' + test_name=examples-simple-prod + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod\e[0m' Rendering files for test examples-simple-prod + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-service-types + '[' examples-service-types '!=' _build ']' + cd .. + mkdir -p examples-simple-prod + cd examples-simple-prod + example_name=simple-prod + render_install_example simple-prod 01 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod 02 + '[' 2 -ne 2 ']' + example_name=simple-prod + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-simple-prod-with-volumes + '[' 1 -ne 1 ']' + test_name=examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simple-prod-with-volumes' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simple-prod-with-volumes\e[0m' Rendering files for test examples-simple-prod-with-volumes + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod + '[' examples-simple-prod '!=' _build ']' + cd .. 
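Note: for the production examples the suite strips the storage options and pins the autoprovisioned Elasticsearch to a single node with a 2Gi memory cap, keeping the cluster small enough for CI. The two patches from the trace, annotated:

    # Drop whatever external-storage options the stock example carries...
    /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml
    # ...and replace the elasticsearch block wholesale with a CI-sized one.
    /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml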
+ mkdir -p examples-simple-prod-with-volumes + cd examples-simple-prod-with-volumes + example_name=simple-prod-with-volumes + render_install_example simple-prod-with-volumes 01 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + JAEGER_NAME=simple-prod + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=production ++ '[' production = production ']' ++ echo production ++ return 0 + jaeger_strategy=production + '[' production = DaemonSet ']' + '[' production = allInOne ']' + '[' production = production ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + [[ true = true ]] + [[ true = true ]] + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.options={}' ./01-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.storage.elasticsearch={"nodeCount":1,"resources":{"limits":{"memory":"2Gi"}}}' ./01-install.yaml + render_smoke_test_example simple-prod-with-volumes 02 + '[' 2 -ne 2 ']' + example_name=simple-prod-with-volumes + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simple-prod-with-volumes.yaml ++ jaeger_name=simple-prod ++ '[' -z simple-prod ']' ++ echo simple-prod ++ return 0 + jaeger_name=simple-prod + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simple-prod true 02 + '[' 3 -ne 3 ']' + jaeger=simple-prod + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + JAEGER_QUERY_ENDPOINT=https://simple-prod-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simple-prod-collector-headless:14268 + export JAEGER_NAME=simple-prod + JAEGER_NAME=simple-prod + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + /tmp/jaeger-tests/bin/gomplate -f ./03-check-volume.yaml.template -o 03-check-volume.yaml + start_test examples-simplest + '[' 1 -ne 1 ']' + test_name=examples-simplest + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-simplest' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-simplest\e[0m' Rendering files for test examples-simplest + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simple-prod-with-volumes + '[' examples-simple-prod-with-volumes '!=' _build ']' + cd .. + mkdir -p examples-simplest + cd examples-simplest + example_name=simplest + render_install_example simplest 00 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/simplest.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + JAEGER_NAME=simplest + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example simplest 01 + '[' 2 -ne 2 ']' + example_name=simplest + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/simplest.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/simplest.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/simplest.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/simplest.yaml ++ jaeger_name=simplest ++ '[' -z simplest ']' ++ echo simplest ++ return 0 + jaeger_name=simplest + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test simplest true 01 + '[' 3 -ne 3 ']' + jaeger=simplest + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + JAEGER_QUERY_ENDPOINT=https://simplest-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 + export JAEGER_NAME=simplest + JAEGER_NAME=simplest + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger + '[' 1 -ne 1 ']' + test_name=examples-with-badger + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger\e[0m' Rendering files for test examples-with-badger + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-simplest + '[' examples-simplest '!=' _build ']' + cd .. + mkdir -p examples-with-badger + cd examples-with-badger + example_name=with-badger + render_install_example with-badger 00 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + JAEGER_NAME=with-badger + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger 01 + '[' 2 -ne 2 ']' + example_name=with-badger + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger.yaml ++ jaeger_name=with-badger ++ '[' -z with-badger ']' ++ echo with-badger ++ return 0 + jaeger_name=with-badger + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 + export JAEGER_NAME=with-badger + JAEGER_NAME=with-badger + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-badger-and-volume + '[' 1 -ne 1 ']' + test_name=examples-with-badger-and-volume + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-badger-and-volume' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-badger-and-volume\e[0m' Rendering files for test examples-with-badger-and-volume + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger + '[' examples-with-badger '!=' _build ']' + cd .. + mkdir -p examples-with-badger-and-volume + cd examples-with-badger-and-volume + example_name=with-badger-and-volume + render_install_example with-badger-and-volume 00 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=00 + install_file=./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-badger-and-volume.yaml -o ./00-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./00-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./00-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' ./00-install.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + JAEGER_NAME=with-badger-and-volume + local jaeger_strategy ++ get_jaeger_strategy ./00-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./00-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = production ']' ++ '[' null = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./00-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./00-assert.yaml + render_smoke_test_example with-badger-and-volume 01 + '[' 2 -ne 2 ']' + example_name=with-badger-and-volume + test_step=01 + deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-badger-and-volume.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-badger-and-volume.yaml ++ jaeger_name=with-badger-and-volume ++ '[' -z with-badger-and-volume ']' ++ echo with-badger-and-volume ++ return 0 + jaeger_name=with-badger-and-volume + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-badger-and-volume true 01 + '[' 3 -ne 3 ']' + jaeger=with-badger-and-volume + is_secured=true + test_step=01 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + JAEGER_QUERY_ENDPOINT=https://with-badger-and-volume-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-badger-and-volume-collector-headless:14268 + export JAEGER_NAME=with-badger-and-volume + JAEGER_NAME=with-badger-and-volume + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./01-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./01-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-cassandra + '[' 1 -ne 1 ']' + test_name=examples-with-cassandra + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-cassandra' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-cassandra\e[0m' Rendering files for test examples-with-cassandra + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-badger-and-volume + '[' examples-with-badger-and-volume '!=' _build ']' + cd .. 
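Note: every example test above runs through the same render_install_example flow. A consolidated reconstruction from the repeated traces (the body is inferred; note that both the DaemonSet and allInOne strategies assert against the all-in-one template):

    render_install_example() {
        local example_name=$1 test_step=$2
        local install_file=./${test_step}-install.yaml
        /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/${example_name}.yaml -o "$install_file"
        # The stock examples point at storage in the default namespace;
        # rewrite the hostnames so they resolve inside the test namespace.
        sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' "$install_file"
        sed -i 's~cassandra.default.svc~cassandra~gi' "$install_file"
        export JAEGER_NAME
        JAEGER_NAME=$(get_jaeger_name "$install_file")
        local jaeger_strategy
        jaeger_strategy=$(get_jaeger_strategy "$install_file")
        if [ "$jaeger_strategy" = DaemonSet ] || [ "$jaeger_strategy" = allInOne ]; then
            /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o "./${test_step}-assert.yaml"
        elif [ "$jaeger_strategy" = production ]; then
            /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o "./${test_step}-assert.yaml"
        fi
    }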
+ mkdir -p examples-with-cassandra + cd examples-with-cassandra + example_name=with-cassandra + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-cassandra 01 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-cassandra.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + JAEGER_NAME=with-cassandra + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-cassandra 02 + '[' 2 -ne 2 ']' + example_name=with-cassandra + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-cassandra.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-cassandra.yaml +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-cassandra.yaml ++ jaeger_name=with-cassandra ++ '[' -z with-cassandra ']' ++ echo with-cassandra ++ return 0 + jaeger_name=with-cassandra + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-cassandra true 02 + '[' 3 -ne 3 ']' + jaeger=with-cassandra + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 + export JAEGER_NAME=with-cassandra + JAEGER_NAME=with-cassandra + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-with-sampling + '[' 1 -ne 1 ']' + test_name=examples-with-sampling + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-with-sampling' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-with-sampling\e[0m' Rendering files for test examples-with-sampling + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-cassandra + '[' examples-with-cassandra '!=' _build ']' + cd .. + mkdir -p examples-with-sampling + cd examples-with-sampling + export example_name=with-sampling + example_name=with-sampling + render_install_cassandra 00 + '[' 1 -ne 1 ']' + test_step=00 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/cassandra-assert.yaml.template -o ./00-assert.yaml + render_install_example with-sampling 01 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=01 + install_file=./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/with-sampling.yaml -o ./01-install.yaml + sed -i 's~server-urls: http://elasticsearch.default.svc:9200~server-urls: http://elasticsearch:9200~gi' ./01-install.yaml + sed -i s~cassandra.default.svc~cassandra~gi ./01-install.yaml + export JAEGER_NAME ++ get_jaeger_name ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' ./01-install.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + JAEGER_NAME=with-sampling + local jaeger_strategy ++ get_jaeger_strategy ./01-install.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=./01-install.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").spec.strategy' ./01-install.yaml ++ strategy=allInOne ++ '[' allInOne = production ']' ++ '[' allInOne = streaming ']' +++ /tmp/jaeger-tests/bin/yq e '. 
| select(.kind == "Jaeger").spec.agent.strategy' ./01-install.yaml ++ strategy=null ++ '[' null = null ']' ++ echo allInOne ++ return 0 + jaeger_strategy=allInOne + '[' allInOne = DaemonSet ']' + '[' allInOne = allInOne ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + render_smoke_test_example with-sampling 02 + '[' 2 -ne 2 ']' + example_name=with-sampling + test_step=02 + deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml ++ get_jaeger_name /tmp/jaeger-tests/examples/with-sampling.yaml ++ '[' 1 -ne 1 ']' ++ deployment_file=/tmp/jaeger-tests/examples/with-sampling.yaml +++ /tmp/jaeger-tests/bin/yq e '. | select(.kind == "Jaeger").metadata.name' /tmp/jaeger-tests/examples/with-sampling.yaml ++ jaeger_name=with-sampling ++ '[' -z with-sampling ']' ++ echo with-sampling ++ return 0 + jaeger_name=with-sampling + is_secured=false + '[' true = true ']' + is_secured=true + render_smoke_test with-sampling true 02 + '[' 3 -ne 3 ']' + jaeger=with-sampling + is_secured=true + test_step=02 + '[' true = true ']' + protocol=https:// + query_port=:443 + template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template + export JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 + export JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 + export JAEGER_NAME=with-sampling + JAEGER_NAME=with-sampling + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./02-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./02-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + start_test examples-agent-as-daemonset + '[' 1 -ne 1 ']' + test_name=examples-agent-as-daemonset + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-agent-as-daemonset' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-agent-as-daemonset\e[0m' Rendering files for test examples-agent-as-daemonset + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-with-sampling + '[' examples-with-sampling '!=' _build ']' + cd .. 
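(The smoke-test rendering pattern repeats identically for every example above: export the query/collector endpoints, render the gomplate template, render the assert file, unset the variables. A minimal sketch follows; the unsecured defaults — http://, port :16686, and the non-OpenShift template path — are assumptions, since this secured OpenShift run only ever exercises the 'true' branch confirmed by the trace.)

    render_smoke_test() {
        [ $# -ne 3 ] && return 1
        local jaeger=$1 is_secured=$2 test_step=$3
        # Assumed unsecured defaults; this log only shows the secured branch.
        local protocol=http:// query_port=:16686
        local template=/tmp/jaeger-tests/tests/templates/smoke-test.yaml.template
        if [ "$is_secured" = true ]; then
            # On OpenShift the query service sits behind the OAuth proxy on 443.
            protocol=https://
            query_port=:443
            template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
        fi
        export JAEGER_QUERY_ENDPOINT=${protocol}${jaeger}-query${query_port}
        export JAEGER_COLLECTOR_ENDPOINT=http://${jaeger}-collector-headless:14268
        export JAEGER_NAME=$jaeger
        /tmp/jaeger-tests/bin/gomplate -f "$template" -o "./${test_step}-smoke-test.yaml"
        /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template \
            -o "./${test_step}-assert.yaml"
        unset JAEGER_NAME JAEGER_QUERY_ENDPOINT JAEGER_COLLECTOR_ENDPOINT
    }

The rendered NN-smoke-test.yaml is the kuttl step that later creates the report-span/check-span jobs seen in the harness output further down.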
+ mkdir -p examples-agent-as-daemonset + cd examples-agent-as-daemonset + '[' true = true ']' + prepare_daemonset 00 + '[' 1 -ne 1 ']' + test_step=00 + '[' true = true ']' + cat /tmp/jaeger-tests/examples/openshift/hostport-scc-daemonset.yaml + echo --- + cat /tmp/jaeger-tests/examples/openshift/service_account_jaeger-agent-daemonset.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/agent-as-daemonset.yaml -o 02-install.yaml + '[' true = true ']' + start_test examples-openshift-with-htpasswd + '[' 1 -ne 1 ']' + test_name=examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== + info 'Rendering files for test examples-openshift-with-htpasswd' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test examples-openshift-with-htpasswd\e[0m' Rendering files for test examples-openshift-with-htpasswd + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-agent-as-daemonset + '[' examples-agent-as-daemonset '!=' _build ']' + cd .. + mkdir -p examples-openshift-with-htpasswd + cd examples-openshift-with-htpasswd + export JAEGER_NAME=with-htpasswd + JAEGER_NAME=with-htpasswd + export JAEGER_USERNAME=awesomeuser + JAEGER_USERNAME=awesomeuser + export JAEGER_PASSWORD=awesomepassword + JAEGER_PASSWORD=awesomepassword + export 'JAEGER_USER_PASSWORD_HASH=awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' + JAEGER_USER_PASSWORD_HASH='awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ echo 'awesomeuser:{SHA}uUdqPVUyqNBmERU0Qxj3KFaZnjw=' ++ base64 + SECRET=YXdlc29tZXVzZXI6e1NIQX11VWRxUFZVeXFOQm1FUlUwUXhqM0tGYVpuanc9Cg== + /tmp/jaeger-tests/bin/gomplate -f ./00-install.yaml.template -o ./00-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/examples/openshift/with-htpasswd.yaml -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/allinone-jaeger-assert.yaml.template -o ./01-assert.yaml + export 'GET_URL_COMMAND=kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + export 'URL=https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + INSECURE=true + JAEGER_USERNAME= + JAEGER_PASSWORD= + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-unsecured.yaml + JAEGER_USERNAME=wronguser + JAEGER_PASSWORD=wrongpassword + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-check-unauthorized.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./04-check-authorized.yaml + skip_test examples-agent-as-daemonset 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=examples-agent-as-daemonset + message='This test is flaky in Prow 
CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build/examples-openshift-with-htpasswd + '[' examples-openshift-with-htpasswd '!=' _build ']' + cd .. + rm -rf examples-agent-as-daemonset + warning 'examples-agent-as-daemonset: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: examples-agent-as-daemonset: This test is flaky in Prow CI\e[0m' WAR: examples-agent-as-daemonset: This test is flaky in Prow CI + skip_test examples-with-badger-and-volume 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=examples-with-badger-and-volume + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + rm -rf examples-with-badger-and-volume + warning 'examples-with-badger-and-volume: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: examples-with-badger-and-volume: This test is flaky in Prow CI\e[0m' WAR: examples-with-badger-and-volume: This test is flaky in Prow CI + skip_test examples-collector-with-priority-class 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=examples-collector-with-priority-class + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/examples/_build + '[' _build '!=' _build ']' + rm -rf examples-collector-with-priority-class + warning 'examples-collector-with-priority-class: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: examples-collector-with-priority-class: This test is flaky in Prow CI\e[0m' WAR: examples-collector-with-priority-class: This test is flaky in Prow CI make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running examples E2E tests' Running examples E2E tests + cd tests/e2e/examples/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1829818249 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-rosa-h-a4mj.qd6c.s3.devshift.org:443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 12 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/examples-agent-with-priority-class === PAUSE kuttl/harness/examples-agent-with-priority-class === RUN kuttl/harness/examples-all-in-one-with-options === PAUSE kuttl/harness/examples-all-in-one-with-options === RUN kuttl/harness/examples-business-application-injected-sidecar === PAUSE kuttl/harness/examples-business-application-injected-sidecar === RUN kuttl/harness/examples-openshift-with-htpasswd === PAUSE kuttl/harness/examples-openshift-with-htpasswd === RUN kuttl/harness/examples-service-types === PAUSE kuttl/harness/examples-service-types === RUN kuttl/harness/examples-simple-prod === PAUSE kuttl/harness/examples-simple-prod === RUN kuttl/harness/examples-simple-prod-with-volumes === PAUSE kuttl/harness/examples-simple-prod-with-volumes === RUN kuttl/harness/examples-simplest === PAUSE kuttl/harness/examples-simplest === RUN kuttl/harness/examples-with-badger === PAUSE kuttl/harness/examples-with-badger === RUN kuttl/harness/examples-with-cassandra === PAUSE kuttl/harness/examples-with-cassandra === RUN kuttl/harness/examples-with-sampling === PAUSE kuttl/harness/examples-with-sampling === CONT kuttl/harness/artifacts logger.go:42: 01:41:14 | artifacts | Creating namespace: kuttl-test-new-owl logger.go:42: 01:41:14 | artifacts | artifacts events from ns kuttl-test-new-owl: logger.go:42: 01:41:14 | artifacts | Deleting namespace: kuttl-test-new-owl === CONT kuttl/harness/examples-simple-prod logger.go:42: 01:41:20 | examples-simple-prod | Creating namespace: kuttl-test-premium-imp logger.go:42: 01:41:20 | examples-simple-prod/1-install | starting test step 1-install logger.go:42: 01:41:20 | examples-simple-prod/1-install | Jaeger:kuttl-test-premium-imp/simple-prod created logger.go:42: 01:51:21 | examples-simple-prod/1-install | test step failed 1-install case.go:364: failed in step 1-install case.go:366: --- Deployment:kuttl-test-premium-imp/simple-prod-collector +++ Deployment:kuttl-test-premium-imp/simple-prod-collector @@ -1,10 +1,412 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-collector + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"28781de4-3202-4aac-81b5-dc8411f7978f"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-collector"}: + .: {} + 
f:args: {} + f:env: + .: {} + k:{"name":"COLLECTOR_OTLP_ENABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"COLLECTOR_ZIPKIN_HOST_PORT"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":4317,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":4318,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":9411,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14250,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14267,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14268,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14269,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/jaeger/sampling"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls-config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"simple-prod-collector-tls-config-volume"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"simple-prod-sampling-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"simple-prod-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T01:41:25Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:41:25Z" name: simple-prod-collector namespace: kuttl-test-premium-imp + ownerReferences: + - apiVersion: 
jaegertracing.io/v1 + controller: true + kind: Jaeger + name: simple-prod + uid: 28781de4-3202-4aac-81b5-dc8411f7978f spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-collector + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-collector + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --collector.grpc.tls.cert=/etc/tls-config/tls.crt + - --collector.grpc.tls.enabled=true + - --collector.grpc.tls.key=/etc/tls-config/tls.key + - --sampling.strategies-file=/etc/jaeger/sampling/sampling.json + - --es.server-urls=https://elasticsearch.kuttl-test-premium-imp.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: COLLECTOR_ZIPKIN_HOST_PORT + value: :9411 + - name: COLLECTOR_OTLP_ENABLED + value: "true" + image: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-collector + ports: + - containerPort: 9411 + name: zipkin + protocol: TCP + - containerPort: 14267 + name: c-tchan-trft + protocol: TCP + - containerPort: 14268 + name: c-binary-trft + protocol: TCP + - containerPort: 14269 + name: admin-http + protocol: TCP + - containerPort: 14250 + name: grpc + protocol: TCP + - containerPort: 4317 + name: grpc-otlp + protocol: TCP + - containerPort: 4318 + name: http-otlp + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/jaeger/sampling + name: simple-prod-sampling-configuration-volume + readOnly: true + - mountPath: /etc/tls-config + name: simple-prod-collector-tls-config-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: simple-prod + serviceAccountName: simple-prod + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: sampling + path: sampling.json + name: simple-prod-sampling-configuration + name: simple-prod-sampling-configuration-volume + - name: simple-prod-collector-tls-config-volume + secret: + defaultMode: 420 + secretName: simple-prod-collector-headless-tls + - 
configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: simple-prod-trusted-ca + name: simple-prod-trusted-ca + - name: certs + secret: + defaultMode: 420 + secretName: simple-prod-jaeger-elasticsearch status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:41:25Z" + lastUpdateTime: "2024-09-19T01:41:25Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:41:25Z" + lastUpdateTime: "2024-09-19T01:41:25Z" + message: ReplicaSet "simple-prod-collector-67d4f9c874" is progressing. + reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 1 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-premium-imp/simple-prod-collector: .status.readyReplicas: key is missing from map case.go:366: --- Deployment:kuttl-test-premium-imp/simple-prod-query +++ Deployment:kuttl-test-premium-imp/simple-prod-query @@ -1,10 +1,508 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.jaegertracing.io/inject: simple-prod + sidecar.jaegertracing.io/revision: "1" + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-query + app.kubernetes.io/part-of: jaeger + sidecar.jaegertracing.io/injected: simple-prod + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.jaegertracing.io/inject: {} + f:sidecar.jaegertracing.io/revision: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"28781de4-3202-4aac-81b5-dc8411f7978f"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:sidecar.jaegertracing.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-query"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"JAEGER_DISABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"METRICS_STORAGE_TYPE"}: + .: {} + f:name: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":16685,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16686,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16687,"protocol":"TCP"}: + .: {} + 
f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"name":"oauth-proxy"}: + .: {} + f:args: {} + f:image: {} + f:imagePullPolicy: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":8443,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls/private"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"simple-prod-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"simple-prod-ui-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"simple-prod-ui-oauth-proxy-tls"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T01:46:26Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:46:26Z" name: simple-prod-query namespace: kuttl-test-premium-imp + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: simple-prod + uid: 28781de4-3202-4aac-81b5-dc8411f7978f spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-query + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + sidecar.jaegertracing.io/inject: simple-prod + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: simple-prod + 
app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-query + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --query.ui-config=/etc/config/ui.json + - --es.server-urls=https://elasticsearch.kuttl-test-premium-imp.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: METRICS_STORAGE_TYPE + - name: JAEGER_DISABLED + value: "false" + - name: JAEGER_SERVICE_NAME + value: simple-prod.kuttl-test-premium-imp + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-query + ports: + - containerPort: 16685 + name: grpc-query + protocol: TCP + - containerPort: 16686 + name: query + protocol: TCP + - containerPort: 16687 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/config + name: simple-prod-ui-configuration-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + - args: + - --cookie-secret=STkwQ0aqpxkttNMU5WZK99 + - --https-address=:8443 + - '--openshift-sar={"namespace": "kuttl-test-premium-imp", "resource": "pods", + "verb": "get"}' + - --openshift-service-account=simple-prod-ui-proxy + - --provider=openshift + - --tls-cert=/etc/tls/private/tls.crt + - --tls-key=/etc/tls/private/tls.key + - --upstream=http://localhost:16686 + env: + - name: JAEGER_SERVICE_NAME + value: simple-prod.kuttl-test-premium-imp + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508 + imagePullPolicy: IfNotPresent + name: oauth-proxy + ports: + - containerPort: 8443 + name: public + protocol: TCP + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/tls/private + name: simple-prod-ui-oauth-proxy-tls + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - args: + - --agent.tags=cluster=undefined,deployment.name=simple-prod-query,host.ip=${HOST_IP:},pod.name=${POD_NAME:},pod.namespace=kuttl-test-premium-imp + - --reporter.grpc.host-port=dns:///simple-prod-collector-headless.kuttl-test-premium-imp.svc:14250 + - --reporter.grpc.tls.ca=/etc/pki/ca-trust/source/service-ca/service-ca.crt + - --reporter.grpc.tls.enabled=true + env: + - name: POD_NAME + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.name + - name: HOST_IP + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: status.hostIP + image: registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147 + imagePullPolicy: IfNotPresent + livenessProbe: + 
failureThreshold: 5 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-agent + ports: + - containerPort: 5775 + name: zk-compact-trft + protocol: UDP + - containerPort: 5778 + name: config-rest + protocol: TCP + - containerPort: 6831 + name: jg-compact-trft + protocol: UDP + - containerPort: 6832 + name: jg-binary-trft + protocol: UDP + - containerPort: 14271 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - mountPath: /etc/pki/ca-trust/source/service-ca + name: simple-prod-service-ca + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: simple-prod-ui-proxy + serviceAccountName: simple-prod-ui-proxy + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: ui + path: ui.json + name: simple-prod-ui-configuration + name: simple-prod-ui-configuration-volume + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: simple-prod-trusted-ca + name: simple-prod-trusted-ca + - name: simple-prod-ui-oauth-proxy-tls + secret: + defaultMode: 420 + secretName: simple-prod-ui-oauth-proxy-tls + - name: certs + secret: + defaultMode: 420 + secretName: simple-prod-jaeger-elasticsearch + - configMap: + defaultMode: 420 + items: + - key: service-ca.crt + path: service-ca.crt + name: simple-prod-service-ca + name: simple-prod-service-ca status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:41:25Z" + lastUpdateTime: "2024-09-19T01:41:25Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:41:25Z" + lastUpdateTime: "2024-09-19T01:41:25Z" + message: ReplicaSet "simple-prod-query-bfd547d75" is progressing. 
+ reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 3 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-premium-imp/simple-prod-query: .status.readyReplicas: key is missing from map logger.go:42: 01:51:21 | examples-simple-prod | examples-simple-prod events from ns kuttl-test-premium-imp: logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:25 +0000 UTC Normal Pod simple-prod-collector-67d4f9c874-wg9f6 Binding Scheduled Successfully assigned kuttl-test-premium-imp/simple-prod-collector-67d4f9c874-wg9f6 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:25 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-67d4f9c874 SuccessfulCreate Created pod: simple-prod-collector-67d4f9c874-wg9f6 replicaset-controller logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:25 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-67d4f9c874 to 1 deployment-controller logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:25 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk Binding Scheduled Successfully assigned kuttl-test-premium-imp/simple-prod-query-bfd547d75-hfqlk to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:25 +0000 UTC Normal ReplicaSet.apps simple-prod-query-bfd547d75 SuccessfulCreate Created pod: simple-prod-query-bfd547d75-hfqlk replicaset-controller logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:25 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-bfd547d75 to 1 deployment-controller logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-collector-67d4f9c874-wg9f6 AddedInterface Add eth0 [10.130.0.40/23] from ovn-kubernetes multus logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-collector-67d4f9c874-wg9f6.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" already present on machine kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-collector-67d4f9c874-wg9f6.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-collector-67d4f9c874-wg9f6.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk AddedInterface Add eth0 [10.130.0.41/23] from ovn-kubernetes multus logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" already present on machine kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 01:51:21 | 
examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:26 +0000 UTC Normal Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:27 +0000 UTC Warning Pod simple-prod-collector-67d4f9c874-wg9f6.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:27 +0000 UTC Warning Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{jaeger-query} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:37 +0000 UTC Warning Pod simple-prod-collector-67d4f9c874-wg9f6.spec.containers{jaeger-collector} BackOff Back-off restarting failed container jaeger-collector in pod simple-prod-collector-67d4f9c874-wg9f6_kuttl-test-premium-imp(51403773-6383-4003-bb70-93aa341fa1de) kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:37 +0000 UTC Warning Pod simple-prod-query-bfd547d75-hfqlk.spec.containers{jaeger-query} BackOff Back-off restarting failed container jaeger-query in pod simple-prod-query-bfd547d75-hfqlk_kuttl-test-premium-imp(94b5d056-c3b3-43e6-ad71-318c6b980ed4) kubelet logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:41:40 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector 
FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:50:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:50:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:51:21 | examples-simple-prod | 2024-09-19 01:50:41 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler logger.go:42: 01:51:21 | examples-simple-prod | Deleting namespace: kuttl-test-premium-imp === CONT kuttl/harness/examples-with-sampling logger.go:42: 01:51:28 | examples-with-sampling | Creating namespace: kuttl-test-legible-husky logger.go:42: 01:51:28 | examples-with-sampling/0-install | starting test step 0-install logger.go:42: 01:51:28 | examples-with-sampling/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 01:51:28 | examples-with-sampling/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 01:51:28 | examples-with-sampling/0-install | >>>> Creating namespace kuttl-test-legible-husky logger.go:42: 01:51:28 | examples-with-sampling/0-install | kubectl create namespace kuttl-test-legible-husky 2>&1 | grep -v "already exists" || true logger.go:42: 01:51:28 | examples-with-sampling/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-legible-husky 2>&1 | grep -v "already exists" || true logger.go:42: 01:51:29 | examples-with-sampling/0-install | service/cassandra created logger.go:42: 01:51:29 | examples-with-sampling/0-install | statefulset.apps/cassandra created logger.go:42: 01:51:29 | examples-with-sampling/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 01:51:43 | examples-with-sampling/0-install | test step completed 0-install logger.go:42: 01:51:43 | examples-with-sampling/1-install | starting test step 1-install logger.go:42: 01:51:43 | examples-with-sampling/1-install | Jaeger:kuttl-test-legible-husky/with-sampling created logger.go:42: 01:51:46 | examples-with-sampling/1-install | test step completed 1-install logger.go:42: 01:51:46 | examples-with-sampling/2-smoke-test | starting test step 2-smoke-test logger.go:42: 01:51:46 | examples-with-sampling/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-sampling /dev/null] logger.go:42: 01:51:48 | examples-with-sampling/2-smoke-test | Warning: resource jaegers/with-sampling is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. 
kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 01:51:55 | examples-with-sampling/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://with-sampling-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-sampling-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 01:51:56 | examples-with-sampling/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 01:51:56 | examples-with-sampling/2-smoke-test | job.batch/report-span created logger.go:42: 01:51:56 | examples-with-sampling/2-smoke-test | job.batch/check-span created logger.go:42: 01:52:09 | examples-with-sampling/2-smoke-test | test step completed 2-smoke-test logger.go:42: 01:52:09 | examples-with-sampling/3- | starting test step 3- logger.go:42: 01:52:10 | examples-with-sampling/3- | test step completed 3- logger.go:42: 01:52:10 | examples-with-sampling | examples-with-sampling events from ns kuttl-test-legible-husky: logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:29 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-legible-husky/cassandra-0 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:29 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.130.0.43/23] from ovn-kubernetes multus logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:29 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:29 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:35 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 5.631s (5.631s including waiting) kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:35 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:35 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:36 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-legible-husky/cassandra-1 to ip-10-0-52-194.us-west-2.compute.internal default-scheduler logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:36 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.129.0.29/23] from ovn-kubernetes multus logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:36 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulling Pulling image "cassandra:3.11" kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:36 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:42 +0000 UTC Normal Pod 
cassandra-1.spec.containers{cassandra} Pulled Successfully pulled image "cassandra:3.11" in 5.724s (5.724s including waiting) kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:42 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:42 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:43 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f Binding Scheduled Successfully assigned kuttl-test-legible-husky/with-sampling-764dc9f8f-2l85f to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:43 +0000 UTC Normal ReplicaSet.apps with-sampling-764dc9f8f SuccessfulCreate Created pod: with-sampling-764dc9f8f-2l85f replicaset-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:43 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-764dc9f8f to 1 deployment-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:44 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f AddedInterface Add eth0 [10.130.0.44/23] from ovn-kubernetes multus logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:44 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:44 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:44 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:44 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:44 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:44 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:49 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:49 +0000 UTC Normal Pod with-sampling-764dc9f8f-2l85f.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:49 +0000 UTC Normal ReplicaSet.apps with-sampling-764dc9f8f SuccessfulDelete Deleted pod: with-sampling-764dc9f8f-2l85f replicaset-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:49 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled down replica set 
with-sampling-764dc9f8f to 0 from 1 deployment-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:50 +0000 UTC Normal Pod with-sampling-58c5bd9759-fzd6d Binding Scheduled Successfully assigned kuttl-test-legible-husky/with-sampling-58c5bd9759-fzd6d to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:50 +0000 UTC Normal Pod with-sampling-58c5bd9759-fzd6d AddedInterface Add eth0 [10.130.0.45/23] from ovn-kubernetes multus logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:50 +0000 UTC Normal Pod with-sampling-58c5bd9759-fzd6d.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:50 +0000 UTC Normal Pod with-sampling-58c5bd9759-fzd6d.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:50 +0000 UTC Normal Pod with-sampling-58c5bd9759-fzd6d.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:50 +0000 UTC Normal Pod with-sampling-58c5bd9759-fzd6d.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:50 +0000 UTC Normal ReplicaSet.apps with-sampling-58c5bd9759 SuccessfulCreate Created pod: with-sampling-58c5bd9759-fzd6d replicaset-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:50 +0000 UTC Normal Deployment.apps with-sampling ScalingReplicaSet Scaled up replica set with-sampling-58c5bd9759 to 1 deployment-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:51 +0000 UTC Normal Pod with-sampling-58c5bd9759-fzd6d.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:51 +0000 UTC Normal Pod with-sampling-58c5bd9759-fzd6d.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:56 +0000 UTC Normal Pod check-span-9ps6k Binding Scheduled Successfully assigned kuttl-test-legible-husky/check-span-9ps6k to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:56 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-9ps6k job-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:56 +0000 UTC Normal Pod report-span-bh5s5 Binding Scheduled Successfully assigned kuttl-test-legible-husky/report-span-bh5s5 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:56 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-bh5s5 job-controller logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:57 +0000 UTC Normal Pod check-span-9ps6k AddedInterface Add eth0 [10.130.0.47/23] from ovn-kubernetes multus logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:57 +0000 UTC Normal Pod check-span-9ps6k.spec.containers{asserts-container} Pulling 
Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:57 +0000 UTC Normal Pod report-span-bh5s5 AddedInterface Add eth0 [10.130.0.46/23] from ovn-kubernetes multus logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:57 +0000 UTC Normal Pod report-span-bh5s5.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:58 +0000 UTC Normal Pod check-span-9ps6k.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 861ms (861ms including waiting) kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:58 +0000 UTC Normal Pod check-span-9ps6k.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:58 +0000 UTC Normal Pod check-span-9ps6k.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:58 +0000 UTC Normal Pod report-span-bh5s5.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 928ms (928ms including waiting) kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:58 +0000 UTC Normal Pod report-span-bh5s5.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:51:58 +0000 UTC Normal Pod report-span-bh5s5.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 01:52:10 | examples-with-sampling | 2024-09-19 01:52:09 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 01:52:10 | examples-with-sampling | Deleting namespace: kuttl-test-legible-husky === CONT kuttl/harness/examples-with-cassandra logger.go:42: 01:55:32 | examples-with-cassandra | Creating namespace: kuttl-test-huge-terrier logger.go:42: 01:55:32 | examples-with-cassandra/0-install | starting test step 0-install logger.go:42: 01:55:32 | examples-with-cassandra/0-install | running command: [sh -c cd /tmp/jaeger-tests && make cassandra STORAGE_NAMESPACE=$NAMESPACE] logger.go:42: 01:55:32 | examples-with-cassandra/0-install | make[2]: Entering directory '/tmp/jaeger-tests' logger.go:42: 01:55:32 | examples-with-cassandra/0-install | >>>> Creating namespace kuttl-test-huge-terrier logger.go:42: 01:55:32 | examples-with-cassandra/0-install | kubectl create namespace kuttl-test-huge-terrier 2>&1 | grep -v "already exists" || true logger.go:42: 01:55:32 | examples-with-cassandra/0-install | kubectl create -f ./tests/cassandra.yml --namespace kuttl-test-huge-terrier 2>&1 | grep -v "already exists" || true logger.go:42: 01:55:33 | examples-with-cassandra/0-install | service/cassandra created logger.go:42: 01:55:33 | examples-with-cassandra/0-install | statefulset.apps/cassandra created logger.go:42: 01:55:33 | examples-with-cassandra/0-install | make[2]: Leaving directory '/tmp/jaeger-tests' logger.go:42: 01:55:35 | examples-with-cassandra/0-install | test step completed 0-install logger.go:42: 01:55:35 | examples-with-cassandra/1-install | starting test step 1-install logger.go:42: 01:55:36 | examples-with-cassandra/1-install | Jaeger:kuttl-test-huge-terrier/with-cassandra created logger.go:42: 01:56:10 | 
examples-with-cassandra/1-install | test step completed 1-install logger.go:42: 01:56:10 | examples-with-cassandra/2-smoke-test | starting test step 2-smoke-test logger.go:42: 01:56:10 | examples-with-cassandra/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-cassandra /dev/null] logger.go:42: 01:56:12 | examples-with-cassandra/2-smoke-test | Warning: resource jaegers/with-cassandra is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. logger.go:42: 01:56:19 | examples-with-cassandra/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://with-cassandra-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-cassandra-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 01:56:20 | examples-with-cassandra/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 01:56:20 | examples-with-cassandra/2-smoke-test | job.batch/report-span created logger.go:42: 01:56:20 | examples-with-cassandra/2-smoke-test | job.batch/check-span created logger.go:42: 01:56:33 | examples-with-cassandra/2-smoke-test | test step completed 2-smoke-test logger.go:42: 01:56:34 | examples-with-cassandra | examples-with-cassandra events from ns kuttl-test-huge-terrier: logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:33 +0000 UTC Normal Pod cassandra-0 Binding Scheduled Successfully assigned kuttl-test-huge-terrier/cassandra-0 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:33 +0000 UTC Normal Pod cassandra-0 AddedInterface Add eth0 [10.130.0.48/23] from ovn-kubernetes multus logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:33 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:33 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:33 +0000 UTC Normal Pod cassandra-0.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:33 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-0 in StatefulSet cassandra successful statefulset-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:34 +0000 UTC Normal Pod cassandra-1 Binding Scheduled Successfully assigned kuttl-test-huge-terrier/cassandra-1 to ip-10-0-52-194.us-west-2.compute.internal default-scheduler logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:34 +0000 UTC Normal StatefulSet.apps cassandra SuccessfulCreate create Pod cassandra-1 in StatefulSet cassandra successful statefulset-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:35 +0000 UTC Normal Pod cassandra-1 AddedInterface Add eth0 [10.129.0.30/23] from ovn-kubernetes multus logger.go:42: 01:56:34 | 
examples-with-cassandra | 2024-09-19 01:55:35 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Pulled Container image "cassandra:3.11" already present on machine kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:35 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Created Created container cassandra kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:35 +0000 UTC Normal Pod cassandra-1.spec.containers{cassandra} Started Started container cassandra kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:36 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fzhfm Binding Scheduled Successfully assigned kuttl-test-huge-terrier/with-cassandra-cassandra-schema-job-fzhfm to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:36 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fzhfm AddedInterface Add eth0 [10.130.0.49/23] from ovn-kubernetes multus logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:36 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fzhfm.spec.containers{with-cassandra-cassandra-schema-job} Pulling Pulling image "jaegertracing/jaeger-cassandra-schema:1.57.0" kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:36 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job SuccessfulCreate Created pod: with-cassandra-cassandra-schema-job-fzhfm job-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:43 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fzhfm.spec.containers{with-cassandra-cassandra-schema-job} Pulled Successfully pulled image "jaegertracing/jaeger-cassandra-schema:1.57.0" in 6.498s (6.498s including waiting) kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:43 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fzhfm.spec.containers{with-cassandra-cassandra-schema-job} Created Created container with-cassandra-cassandra-schema-job kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:43 +0000 UTC Normal Pod with-cassandra-cassandra-schema-job-fzhfm.spec.containers{with-cassandra-cassandra-schema-job} Started Started container with-cassandra-cassandra-schema-job kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:51 +0000 UTC Normal Job.batch with-cassandra-cassandra-schema-job Completed Job completed job-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:52 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27 Binding Scheduled Successfully assigned kuttl-test-huge-terrier/with-cassandra-d84f955bb-j2p27 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:52 +0000 UTC Normal ReplicaSet.apps with-cassandra-d84f955bb SuccessfulCreate Created pod: with-cassandra-d84f955bb-j2p27 replicaset-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:52 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-d84f955bb to 1 deployment-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:53 +0000 UTC Warning Pod with-cassandra-d84f955bb-j2p27 FailedMount MountVolume.SetUp failed for volume "with-cassandra-ui-oauth-proxy-tls" : secret "with-cassandra-ui-oauth-proxy-tls" not found kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 
01:55:53 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27 AddedInterface Add eth0 [10.130.0.50/23] from ovn-kubernetes multus logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:53 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:53 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:53 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:53 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:54 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:54 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:55:55 +0000 UTC Warning Pod with-cassandra-d84f955bb-j2p27.spec.containers{jaeger} BackOff Back-off restarting failed container jaeger in pod with-cassandra-d84f955bb-j2p27_kuttl-test-huge-terrier(15f3239c-fc2c-4373-b4b7-698ecaa36f13) kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:13 +0000 UTC Normal Pod with-cassandra-7949cd665d-rg4ps Binding Scheduled Successfully assigned kuttl-test-huge-terrier/with-cassandra-7949cd665d-rg4ps to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:13 +0000 UTC Normal ReplicaSet.apps with-cassandra-7949cd665d SuccessfulCreate Created pod: with-cassandra-7949cd665d-rg4ps replicaset-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:13 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:13 +0000 UTC Normal Pod with-cassandra-d84f955bb-j2p27.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:13 +0000 UTC Normal ReplicaSet.apps with-cassandra-d84f955bb SuccessfulDelete Deleted pod: with-cassandra-d84f955bb-j2p27 replicaset-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:13 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled down replica set with-cassandra-d84f955bb to 0 from 1 deployment-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:13 +0000 UTC Normal Deployment.apps with-cassandra ScalingReplicaSet Scaled up replica set with-cassandra-7949cd665d to 1 deployment-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:14 +0000 UTC Normal Pod with-cassandra-7949cd665d-rg4ps AddedInterface Add eth0 
[10.130.0.51/23] from ovn-kubernetes multus logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:14 +0000 UTC Normal Pod with-cassandra-7949cd665d-rg4ps.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:14 +0000 UTC Normal Pod with-cassandra-7949cd665d-rg4ps.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:14 +0000 UTC Normal Pod with-cassandra-7949cd665d-rg4ps.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:14 +0000 UTC Normal Pod with-cassandra-7949cd665d-rg4ps.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:14 +0000 UTC Normal Pod with-cassandra-7949cd665d-rg4ps.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:14 +0000 UTC Normal Pod with-cassandra-7949cd665d-rg4ps.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:20 +0000 UTC Normal Pod check-span-rh4bq Binding Scheduled Successfully assigned kuttl-test-huge-terrier/check-span-rh4bq to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:20 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-rh4bq job-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:20 +0000 UTC Normal Pod report-span-7q4j4 Binding Scheduled Successfully assigned kuttl-test-huge-terrier/report-span-7q4j4 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:20 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-7q4j4 job-controller logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:21 +0000 UTC Normal Pod check-span-rh4bq AddedInterface Add eth0 [10.130.0.53/23] from ovn-kubernetes multus logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:21 +0000 UTC Normal Pod check-span-rh4bq.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:21 +0000 UTC Normal Pod report-span-7q4j4 AddedInterface Add eth0 [10.130.0.52/23] from ovn-kubernetes multus logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:21 +0000 UTC Normal Pod report-span-7q4j4.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:22 +0000 UTC Normal Pod check-span-rh4bq.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 773ms (773ms including waiting) kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:22 +0000 UTC Normal Pod check-span-rh4bq.spec.containers{asserts-container} Created 
Created container asserts-container kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:22 +0000 UTC Normal Pod check-span-rh4bq.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:22 +0000 UTC Normal Pod report-span-7q4j4.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 856ms (856ms including waiting) kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:22 +0000 UTC Normal Pod report-span-7q4j4.spec.containers{report-span} Created Created container report-span kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:22 +0000 UTC Normal Pod report-span-7q4j4.spec.containers{report-span} Started Started container report-span kubelet logger.go:42: 01:56:34 | examples-with-cassandra | 2024-09-19 01:56:32 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 01:56:34 | examples-with-cassandra | Deleting namespace: kuttl-test-huge-terrier === CONT kuttl/harness/examples-with-badger logger.go:42: 01:56:46 | examples-with-badger | Creating namespace: kuttl-test-enhanced-gannet logger.go:42: 01:56:46 | examples-with-badger/0-install | starting test step 0-install logger.go:42: 01:56:46 | examples-with-badger/0-install | Jaeger:kuttl-test-enhanced-gannet/with-badger created logger.go:42: 01:56:51 | examples-with-badger/0-install | test step completed 0-install logger.go:42: 01:56:51 | examples-with-badger/1-smoke-test | starting test step 1-smoke-test logger.go:42: 01:56:51 | examples-with-badger/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE with-badger /dev/null] logger.go:42: 01:56:52 | examples-with-badger/1-smoke-test | Warning: resource jaegers/with-badger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
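The smoke-test step recorded above and below is identical across the example suites: get-token.sh provisions a token for the e2e-test service account, gomplate renders the shared smoke-test template into a report-span/check-span Job pair, and kubectl applies it; the step passes once check-span completes. A minimal sketch of that flow, assuming the paths and endpoint values shown in the surrounding records (the template contents themselves are not part of this log, and the final kubectl wait is illustrative only, since in the suite kuttl's own assert performs the equivalent check):

# Provision a token for the e2e-test service account (helper shipped with the
# test repo; its internals are not shown in this log).
SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh "$NAMESPACE" with-badger /dev/null

# Render the report-span/check-span Job pair from the shared template;
# every variable below appears verbatim in the logged command.
ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest \
JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 \
JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 \
MOUNT_SECRET=e2e-test \
/tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml

# report-span posts a test span to the collector endpoint; check-span polls
# the query endpoint until that span is returned.
kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"
kubectl wait --for=condition=complete job/check-span -n "$NAMESPACE" --timeout=5m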
logger.go:42: 01:57:00 | examples-with-badger/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://with-badger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://with-badger-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 01:57:00 | examples-with-badger/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 01:57:00 | examples-with-badger/1-smoke-test | job.batch/report-span created logger.go:42: 01:57:01 | examples-with-badger/1-smoke-test | job.batch/check-span created logger.go:42: 01:57:14 | examples-with-badger/1-smoke-test | test step completed 1-smoke-test logger.go:42: 01:57:14 | examples-with-badger | examples-with-badger events from ns kuttl-test-enhanced-gannet: logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:46 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-c56c798bd to 1 deployment-controller logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:47 +0000 UTC Normal Pod with-badger-c56c798bd-rbq26 Binding Scheduled Successfully assigned kuttl-test-enhanced-gannet/with-badger-c56c798bd-rbq26 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:47 +0000 UTC Normal ReplicaSet.apps with-badger-c56c798bd SuccessfulCreate Created pod: with-badger-c56c798bd-rbq26 replicaset-controller logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:48 +0000 UTC Warning Pod with-badger-c56c798bd-rbq26 FailedMount MountVolume.SetUp failed for volume "with-badger-ui-configuration-volume" : failed to sync configmap cache: timed out waiting for the condition kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:48 +0000 UTC Normal Pod with-badger-c56c798bd-rbq26 AddedInterface Add eth0 [10.130.0.54/23] from ovn-kubernetes multus logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:48 +0000 UTC Normal Pod with-badger-c56c798bd-rbq26.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:49 +0000 UTC Normal Pod with-badger-c56c798bd-rbq26.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:49 +0000 UTC Normal Pod with-badger-c56c798bd-rbq26.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:49 +0000 UTC Normal Pod with-badger-c56c798bd-rbq26.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:49 +0000 UTC Normal Pod with-badger-c56c798bd-rbq26.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:49 +0000 UTC Normal Pod with-badger-c56c798bd-rbq26.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:53 +0000 UTC 
Normal Pod with-badger-c56c798bd-rbq26.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:53 +0000 UTC Normal Pod with-badger-c56c798bd-rbq26.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:53 +0000 UTC Normal ReplicaSet.apps with-badger-c56c798bd SuccessfulDelete Deleted pod: with-badger-c56c798bd-rbq26 replicaset-controller logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:53 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled down replica set with-badger-c56c798bd to 0 from 1 deployment-controller logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal Pod with-badger-7cc664f657-8h7nr Binding Scheduled Successfully assigned kuttl-test-enhanced-gannet/with-badger-7cc664f657-8h7nr to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal Pod with-badger-7cc664f657-8h7nr AddedInterface Add eth0 [10.130.0.55/23] from ovn-kubernetes multus logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal Pod with-badger-7cc664f657-8h7nr.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal Pod with-badger-7cc664f657-8h7nr.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal Pod with-badger-7cc664f657-8h7nr.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal Pod with-badger-7cc664f657-8h7nr.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal Pod with-badger-7cc664f657-8h7nr.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal Pod with-badger-7cc664f657-8h7nr.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal ReplicaSet.apps with-badger-7cc664f657 SuccessfulCreate Created pod: with-badger-7cc664f657-8h7nr replicaset-controller logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:56:55 +0000 UTC Normal Deployment.apps with-badger ScalingReplicaSet Scaled up replica set with-badger-7cc664f657 to 1 deployment-controller logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:00 +0000 UTC Normal Pod report-span-89qwj Binding Scheduled Successfully assigned kuttl-test-enhanced-gannet/report-span-89qwj to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:00 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-89qwj job-controller logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:01 +0000 UTC Normal Pod check-span-htfcl Binding Scheduled Successfully assigned 
kuttl-test-enhanced-gannet/check-span-htfcl to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:01 +0000 UTC Normal Pod check-span-htfcl AddedInterface Add eth0 [10.130.0.57/23] from ovn-kubernetes multus
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:01 +0000 UTC Normal Pod check-span-htfcl.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:01 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-htfcl job-controller
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:01 +0000 UTC Normal Pod report-span-89qwj AddedInterface Add eth0 [10.130.0.56/23] from ovn-kubernetes multus
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:01 +0000 UTC Normal Pod report-span-89qwj.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:02 +0000 UTC Normal Pod check-span-htfcl.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 815ms (815ms including waiting) kubelet
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:02 +0000 UTC Normal Pod check-span-htfcl.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:02 +0000 UTC Normal Pod check-span-htfcl.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:02 +0000 UTC Normal Pod report-span-89qwj.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 862ms (862ms including waiting) kubelet
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:02 +0000 UTC Normal Pod report-span-89qwj.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:02 +0000 UTC Normal Pod report-span-89qwj.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 01:57:14 | examples-with-badger | 2024-09-19 01:57:13 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 01:57:14 | examples-with-badger | Deleting namespace: kuttl-test-enhanced-gannet
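The examples-with-badger case that just finished exercises the default all-in-one deployment with Badger as the embedded local storage backend. For orientation, a hypothetical minimal Jaeger CR of the shape this example installs; the field names follow the jaegertracing.io/v1 API, but the repo's actual example asset may set additional options:

# Hypothetical reconstruction, not the repo's actual file.
cat <<'EOF' | kubectl apply -n "$NAMESPACE" -f -
apiVersion: jaegertracing.io/v1
kind: Jaeger
metadata:
  name: with-badger
spec:
  storage:
    type: badger
EOF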
=== CONT  kuttl/harness/examples-simplest
logger.go:42: 01:57:26 | examples-simplest | Creating namespace: kuttl-test-generous-shrew
logger.go:42: 01:57:26 | examples-simplest/0-install | starting test step 0-install
logger.go:42: 01:57:26 | examples-simplest/0-install | Jaeger:kuttl-test-generous-shrew/simplest created
logger.go:42: 01:57:28 | examples-simplest/0-install | test step completed 0-install
logger.go:42: 01:57:28 | examples-simplest/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 01:57:28 | examples-simplest/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 01:57:30 | examples-simplest/1-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 01:57:37 | examples-simplest/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 01:57:38 | examples-simplest/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 01:57:38 | examples-simplest/1-smoke-test | job.batch/report-span created
logger.go:42: 01:57:38 | examples-simplest/1-smoke-test | job.batch/check-span created
logger.go:42: 01:57:51 | examples-simplest/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 01:57:52 | examples-simplest | examples-simplest events from ns kuttl-test-generous-shrew:
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal Pod simplest-745c99687-858sx Binding Scheduled Successfully assigned kuttl-test-generous-shrew/simplest-745c99687-858sx to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal Pod simplest-745c99687-858sx AddedInterface Add eth0 [10.130.0.58/23] from ovn-kubernetes multus
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal Pod simplest-745c99687-858sx.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal Pod simplest-745c99687-858sx.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal Pod simplest-745c99687-858sx.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal Pod simplest-745c99687-858sx.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal Pod simplest-745c99687-858sx.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal Pod simplest-745c99687-858sx.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal ReplicaSet.apps simplest-745c99687 SuccessfulCreate Created pod: simplest-745c99687-858sx replicaset-controller
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:27 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-745c99687 to 1 deployment-controller
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:31 +0000 UTC Normal Pod simplest-745c99687-858sx.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:31 +0000 UTC Normal Pod simplest-745c99687-858sx.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:31 +0000 UTC Normal ReplicaSet.apps simplest-745c99687 SuccessfulDelete Deleted pod: simplest-745c99687-858sx replicaset-controller
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:31 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-745c99687 to 0 from 1 deployment-controller
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal Pod simplest-6dfcf557bb-ckfst Binding Scheduled Successfully assigned kuttl-test-generous-shrew/simplest-6dfcf557bb-ckfst to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal Pod simplest-6dfcf557bb-ckfst AddedInterface Add eth0 [10.130.0.59/23] from ovn-kubernetes multus
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal Pod simplest-6dfcf557bb-ckfst.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal Pod simplest-6dfcf557bb-ckfst.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal Pod simplest-6dfcf557bb-ckfst.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal Pod simplest-6dfcf557bb-ckfst.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal Pod simplest-6dfcf557bb-ckfst.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal Pod simplest-6dfcf557bb-ckfst.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal ReplicaSet.apps simplest-6dfcf557bb SuccessfulCreate Created pod: simplest-6dfcf557bb-ckfst replicaset-controller
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:32 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-6dfcf557bb to 1 deployment-controller
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:38 +0000 UTC Normal Pod check-span-2tqtt Binding Scheduled Successfully assigned kuttl-test-generous-shrew/check-span-2tqtt to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:38 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-2tqtt job-controller
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:38 +0000 UTC Normal Pod report-span-99lzc Binding Scheduled Successfully assigned kuttl-test-generous-shrew/report-span-99lzc to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:38 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-99lzc job-controller
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:39 +0000 UTC Normal Pod check-span-2tqtt AddedInterface Add eth0 [10.130.0.61/23] from ovn-kubernetes multus
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:39 +0000 UTC Normal Pod check-span-2tqtt.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:39 +0000 UTC Normal Pod report-span-99lzc AddedInterface Add eth0 [10.130.0.60/23] from ovn-kubernetes multus
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:39 +0000 UTC Normal Pod report-span-99lzc.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:40 +0000 UTC Normal Pod check-span-2tqtt.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 824ms (824ms including waiting) kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:40 +0000 UTC Normal Pod check-span-2tqtt.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:40 +0000 UTC Normal Pod check-span-2tqtt.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:40 +0000 UTC Normal Pod report-span-99lzc.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 798ms (798ms including waiting) kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:40 +0000 UTC Normal Pod report-span-99lzc.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:40 +0000 UTC Normal Pod report-span-99lzc.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 01:57:52 | examples-simplest | 2024-09-19 01:57:51 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 01:57:52 | examples-simplest | Deleting namespace: kuttl-test-generous-shrew
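The final case in this run is the only failure: step 1-install of examples-simple-prod-with-volumes times out after ten minutes (01:58:04 to 02:08:05). A kuttl assert file is a partial manifest that must become a subset of the live object before the step timeout; here the expected status.readyReplicas: 1 never appears on either Deployment, so kuttl prints the full expected-versus-live diff that follows, managedFields noise included. A sketch of the kind of assert the diff implies (a hypothetical reconstruction; the actual assert file in the test repo may pin more fields):

# Inferred from the diff below: both Deployments must report one ready
# replica. kuttl's ".status.readyReplicas: key is missing from map" means the
# field was never set because no pod became Ready.
cat <<'EOF' > 01-assert.yaml   # hypothetical file name
apiVersion: apps/v1
kind: Deployment
metadata:
  name: simple-prod-collector
status:
  readyReplicas: 1
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: simple-prod-query
status:
  readyReplicas: 1
EOF

The live conditions in the diff (MinimumReplicasUnavailable on Available, ReplicaSetUpdated on Progressing) confirm the rollout started but no replica ever became available within the timeout.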
=== CONT  kuttl/harness/examples-simple-prod-with-volumes
logger.go:42: 01:58:04 | examples-simple-prod-with-volumes | Ignoring 03-check-volume.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 01:58:04 | examples-simple-prod-with-volumes | Creating namespace: kuttl-test-hip-blowfish
logger.go:42: 01:58:04 | examples-simple-prod-with-volumes/1-install | starting test step 1-install
logger.go:42: 01:58:04 | examples-simple-prod-with-volumes/1-install | Jaeger:kuttl-test-hip-blowfish/simple-prod created
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes/1-install | test step failed 1-install
case.go:364: failed in step 1-install
case.go:366: --- Deployment:kuttl-test-hip-blowfish/simple-prod-collector +++ Deployment:kuttl-test-hip-blowfish/simple-prod-collector @@ -1,10 +1,424 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-collector + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + 
fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"d0881349-5562-4923-b492-59e0ab8a7ea3"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-collector"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"COLLECTOR_OTLP_ENABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"COLLECTOR_ZIPKIN_HOST_PORT"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":4317,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":4318,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":9411,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14250,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14267,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14268,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14269,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/jaeger/sampling"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls-config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/usr/share/elasticsearch/data"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"elastic-data"}: + .: {} + f:emptyDir: {} + f:name: {} + k:{"name":"simple-prod-collector-tls-config-volume"}: + .: {} + f:name: {} + f:secret: + .: {} + 
f:defaultMode: {} + f:secretName: {} + k:{"name":"simple-prod-sampling-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"simple-prod-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T01:58:11Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T01:58:11Z" name: simple-prod-collector namespace: kuttl-test-hip-blowfish + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: simple-prod + uid: d0881349-5562-4923-b492-59e0ab8a7ea3 spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-collector + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-collector + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --collector.grpc.tls.cert=/etc/tls-config/tls.crt + - --collector.grpc.tls.enabled=true + - --collector.grpc.tls.key=/etc/tls-config/tls.key + - --sampling.strategies-file=/etc/jaeger/sampling/sampling.json + - --es.server-urls=https://elasticsearch.kuttl-test-hip-blowfish.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: COLLECTOR_ZIPKIN_HOST_PORT + value: :9411 + - name: COLLECTOR_OTLP_ENABLED + value: "true" + image: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-collector + ports: + - containerPort: 9411 + name: zipkin + protocol: TCP + - containerPort: 14267 + name: c-tchan-trft + protocol: TCP + - containerPort: 14268 + name: c-binary-trft + protocol: TCP + - containerPort: 14269 + name: admin-http + protocol: TCP + - containerPort: 14250 + name: grpc + protocol: TCP + - containerPort: 4317 + name: grpc-otlp + protocol: TCP + - containerPort: 4318 + name: http-otlp + protocol: TCP + readinessProbe: + failureThreshold: 3 + 
httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /usr/share/elasticsearch/data + name: elastic-data + - mountPath: /etc/jaeger/sampling + name: simple-prod-sampling-configuration-volume + readOnly: true + - mountPath: /etc/tls-config + name: simple-prod-collector-tls-config-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: simple-prod + serviceAccountName: simple-prod + terminationGracePeriodSeconds: 30 + volumes: + - emptyDir: {} + name: elastic-data + - configMap: + defaultMode: 420 + items: + - key: sampling + path: sampling.json + name: simple-prod-sampling-configuration + name: simple-prod-sampling-configuration-volume + - name: simple-prod-collector-tls-config-volume + secret: + defaultMode: 420 + secretName: simple-prod-collector-headless-tls + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: simple-prod-trusted-ca + name: simple-prod-trusted-ca + - name: certs + secret: + defaultMode: 420 + secretName: simple-prod-jaeger-elasticsearch status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:58:11Z" + lastUpdateTime: "2024-09-19T01:58:11Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:58:11Z" + lastUpdateTime: "2024-09-19T01:58:11Z" + message: ReplicaSet "simple-prod-collector-784556499d" is progressing. 
+ reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 1 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-hip-blowfish/simple-prod-collector: .status.readyReplicas: key is missing from map case.go:366: --- Deployment:kuttl-test-hip-blowfish/simple-prod-query +++ Deployment:kuttl-test-hip-blowfish/simple-prod-query @@ -1,10 +1,520 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.jaegertracing.io/inject: simple-prod + sidecar.jaegertracing.io/revision: "1" + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-query + app.kubernetes.io/part-of: jaeger + sidecar.jaegertracing.io/injected: simple-prod + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.jaegertracing.io/inject: {} + f:sidecar.jaegertracing.io/revision: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"d0881349-5562-4923-b492-59e0ab8a7ea3"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:sidecar.jaegertracing.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-query"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"JAEGER_DISABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"METRICS_STORAGE_TYPE"}: + .: {} + f:name: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":16685,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16686,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16687,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + 
k:{"mountPath":"/usr/share/elasticsearch/data"}: + .: {} + f:mountPath: {} + f:name: {} + k:{"name":"oauth-proxy"}: + .: {} + f:args: {} + f:image: {} + f:imagePullPolicy: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":8443,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls/private"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"elastic-data"}: + .: {} + f:emptyDir: {} + f:name: {} + k:{"name":"simple-prod-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"simple-prod-ui-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"simple-prod-ui-oauth-proxy-tls"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T02:03:11Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T02:03:11Z" name: simple-prod-query namespace: kuttl-test-hip-blowfish + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: simple-prod + uid: d0881349-5562-4923-b492-59e0ab8a7ea3 spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-query + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + sidecar.jaegertracing.io/inject: simple-prod + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: simple-prod + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: simple-prod-query + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --query.ui-config=/etc/config/ui.json + - --es.server-urls=https://elasticsearch.kuttl-test-hip-blowfish.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: 
elasticsearch + - name: METRICS_STORAGE_TYPE + - name: JAEGER_DISABLED + value: "false" + - name: JAEGER_SERVICE_NAME + value: simple-prod.kuttl-test-hip-blowfish + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-query + ports: + - containerPort: 16685 + name: grpc-query + protocol: TCP + - containerPort: 16686 + name: query + protocol: TCP + - containerPort: 16687 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /usr/share/elasticsearch/data + name: elastic-data + - mountPath: /etc/config + name: simple-prod-ui-configuration-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + - args: + - --cookie-secret=4Hdawrg2CiGiQkQc6tGEOR + - --https-address=:8443 + - '--openshift-sar={"namespace": "kuttl-test-hip-blowfish", "resource": "pods", + "verb": "get"}' + - --openshift-service-account=simple-prod-ui-proxy + - --provider=openshift + - --tls-cert=/etc/tls/private/tls.crt + - --tls-key=/etc/tls/private/tls.key + - --upstream=http://localhost:16686 + env: + - name: JAEGER_SERVICE_NAME + value: simple-prod.kuttl-test-hip-blowfish + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508 + imagePullPolicy: IfNotPresent + name: oauth-proxy + ports: + - containerPort: 8443 + name: public + protocol: TCP + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/tls/private + name: simple-prod-ui-oauth-proxy-tls + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - args: + - --agent.tags=cluster=undefined,deployment.name=simple-prod-query,host.ip=${HOST_IP:},pod.name=${POD_NAME:},pod.namespace=kuttl-test-hip-blowfish + - --reporter.grpc.host-port=dns:///simple-prod-collector-headless.kuttl-test-hip-blowfish.svc:14250 + - --reporter.grpc.tls.ca=/etc/pki/ca-trust/source/service-ca/service-ca.crt + - --reporter.grpc.tls.enabled=true + env: + - name: POD_NAME + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.name + - name: HOST_IP + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: status.hostIP + image: registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-agent + ports: + - containerPort: 5775 + name: zk-compact-trft + protocol: UDP + - containerPort: 5778 + name: config-rest + protocol: TCP + - containerPort: 6831 + name: jg-compact-trft + protocol: UDP + - containerPort: 6832 + name: jg-binary-trft + protocol: UDP + - 
containerPort: 14271 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/pki/ca-trust/extracted/pem + name: simple-prod-trusted-ca + readOnly: true + - mountPath: /etc/pki/ca-trust/source/service-ca + name: simple-prod-service-ca + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: simple-prod-ui-proxy + serviceAccountName: simple-prod-ui-proxy + terminationGracePeriodSeconds: 30 + volumes: + - emptyDir: {} + name: elastic-data + - configMap: + defaultMode: 420 + items: + - key: ui + path: ui.json + name: simple-prod-ui-configuration + name: simple-prod-ui-configuration-volume + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: simple-prod-trusted-ca + name: simple-prod-trusted-ca + - name: simple-prod-ui-oauth-proxy-tls + secret: + defaultMode: 420 + secretName: simple-prod-ui-oauth-proxy-tls + - name: certs + secret: + defaultMode: 420 + secretName: simple-prod-jaeger-elasticsearch + - configMap: + defaultMode: 420 + items: + - key: service-ca.crt + path: service-ca.crt + name: simple-prod-service-ca + name: simple-prod-service-ca status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T01:58:11Z" + lastUpdateTime: "2024-09-19T01:58:11Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T01:58:11Z" + lastUpdateTime: "2024-09-19T01:58:11Z" + message: ReplicaSet "simple-prod-query-9869654b9" is progressing. 
+ reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 3 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-hip-blowfish/simple-prod-query: .status.readyReplicas: key is missing from map logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-hip-blowfish: logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr Binding Scheduled Successfully assigned kuttl-test-hip-blowfish/simple-prod-collector-784556499d-cj8qr to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr AddedInterface Add eth0 [10.130.0.62/23] from ovn-kubernetes multus logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" already present on machine kubelet logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-784556499d SuccessfulCreate Created pod: simple-prod-collector-784556499d-cj8qr replicaset-controller logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-784556499d to 1 deployment-controller logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82 Binding Scheduled Successfully assigned kuttl-test-hip-blowfish/simple-prod-query-9869654b9-psv82 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82 AddedInterface Add eth0 [10.130.0.63/23] from ovn-kubernetes multus logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" already present on machine kubelet logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 
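The assert above failed because the Deployment never published .status.readyReplicas: that field only appears once at least one replica passes its readiness probe, and the events below show the collector and query containers crash-looping while Elasticsearch is not yet reachable. A minimal sketch of the kind of kuttl assert step that produces exactly this "key is missing from map" error, plus a manual check; the file name and contents are an assumption, the suite's actual assert file is not shown in the log:

    # Hypothetical kuttl assert step: kuttl compares this partial object
    # against the live Deployment and reports any key the live object lacks.
    cat > 01-assert.yaml <<'EOF'
    apiVersion: apps/v1
    kind: Deployment
    metadata:
      name: simple-prod-query
    status:
      readyReplicas: 1
    EOF
    # Manual equivalent; prints nothing while no replica is ready:
    kubectl get deployment simple-prod-query -n kuttl-test-hip-blowfish \
      -o jsonpath='{.status.readyReplicas}'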
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | examples-simple-prod-with-volumes events from ns kuttl-test-hip-blowfish:
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr Binding Scheduled Successfully assigned kuttl-test-hip-blowfish/simple-prod-collector-784556499d-cj8qr to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr AddedInterface Add eth0 [10.130.0.62/23] from ovn-kubernetes multus
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" already present on machine kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-collector-784556499d-cj8qr.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal ReplicaSet.apps simple-prod-collector-784556499d SuccessfulCreate Created pod: simple-prod-collector-784556499d-cj8qr replicaset-controller
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Deployment.apps simple-prod-collector ScalingReplicaSet Scaled up replica set simple-prod-collector-784556499d to 1 deployment-controller
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82 Binding Scheduled Successfully assigned kuttl-test-hip-blowfish/simple-prod-query-9869654b9-psv82 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82 AddedInterface Add eth0 [10.130.0.63/23] from ovn-kubernetes multus
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" already present on machine kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal ReplicaSet.apps simple-prod-query-9869654b9 SuccessfulCreate Created pod: simple-prod-query-9869654b9-psv82 replicaset-controller
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:11 +0000 UTC Normal Deployment.apps simple-prod-query ScalingReplicaSet Scaled up replica set simple-prod-query-9869654b9 to 1 deployment-controller
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:12 +0000 UTC Warning Pod simple-prod-collector-784556499d-cj8qr.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:12 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:12 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:12 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:12 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:12 +0000 UTC Normal Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:13 +0000 UTC Warning Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-query} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:23 +0000 UTC Warning Pod simple-prod-collector-784556499d-cj8qr.spec.containers{jaeger-collector} BackOff Back-off restarting failed container jaeger-collector in pod simple-prod-collector-784556499d-cj8qr_kuttl-test-hip-blowfish(22a2ee5d-b495-4faf-91c3-1546cfdf9403) kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:23 +0000 UTC Warning Pod simple-prod-query-9869654b9-psv82.spec.containers{jaeger-query} BackOff Back-off restarting failed container jaeger-query in pod simple-prod-query-9869654b9-psv82_kuttl-test-hip-blowfish(2f4a3913-5098-43c2-85b8-abedddb88016) kubelet
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:26 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:26 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | 2024-09-19 01:58:26 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling simple-prod-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 02:08:05 | examples-simple-prod-with-volumes | Deleting namespace: kuttl-test-hip-blowfish
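The three HorizontalPodAutoscaler warnings are a common side effect of scraping metrics for pods that have only just started (and here never became ready): the resource metrics API simply has no CPU or memory samples to return yet. A quick way to see exactly what the HPA sees, assuming a metrics server is serving metrics.k8s.io on the cluster:

    kubectl top pods -n kuttl-test-hip-blowfish
    kubectl get --raw /apis/metrics.k8s.io/v1beta1/namespaces/kuttl-test-hip-blowfish/pods
    kubectl describe hpa simple-prod-collector -n kuttl-test-hip-blowfish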
=== CONT kuttl/harness/examples-business-application-injected-sidecar
logger.go:42: 02:08:11 | examples-business-application-injected-sidecar | Creating namespace: kuttl-test-equal-lacewing
logger.go:42: 02:08:11 | examples-business-application-injected-sidecar/0-install | starting test step 0-install
logger.go:42: 02:08:12 | examples-business-application-injected-sidecar/0-install | Deployment:kuttl-test-equal-lacewing/myapp created
logger.go:42: 02:08:12 | examples-business-application-injected-sidecar/0-install | test step completed 0-install
logger.go:42: 02:08:12 | examples-business-application-injected-sidecar/1-install | starting test step 1-install
logger.go:42: 02:08:12 | examples-business-application-injected-sidecar/1-install | Jaeger:kuttl-test-equal-lacewing/simplest created
logger.go:42: 02:08:16 | examples-business-application-injected-sidecar/1-install | test step completed 1-install
logger.go:42: 02:08:16 | examples-business-application-injected-sidecar/2-smoke-test | starting test step 2-smoke-test
logger.go:42: 02:08:16 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE simplest /dev/null]
logger.go:42: 02:08:17 | examples-business-application-injected-sidecar/2-smoke-test | Warning: resource jaegers/simplest is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 02:08:24 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://simplest-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 02:08:25 | examples-business-application-injected-sidecar/2-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 02:08:25 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/report-span created
logger.go:42: 02:08:26 | examples-business-application-injected-sidecar/2-smoke-test | job.batch/check-span created
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar/2-smoke-test | test step completed 2-smoke-test
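Every suite's smoke test follows the same two-step pattern visible above: gomplate renders a Job manifest from a shared template using the endpoints of the instance under test, then kubectl applies it, creating the report-span and check-span Jobs seen in the events below. The equivalent manual invocation, with values taken verbatim from the log:

    ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest \
    JAEGER_COLLECTOR_ENDPOINT=http://simplest-collector-headless:14268 \
    JAEGER_QUERY_ENDPOINT=https://simplest-query:443 \
    MOUNT_SECRET=e2e-test \
      /tmp/jaeger-tests/bin/gomplate \
        -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template \
        -o smoke-test-job.yaml
    # creates the report-span and check-span Jobs in the test namespace
    kubectl apply -f smoke-test-job.yaml -n "$NAMESPACE"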
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | examples-business-application-injected-sidecar events from ns kuttl-test-equal-lacewing:
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:12 +0000 UTC Normal Pod myapp-679f79d5f8-jvn68 Binding Scheduled Successfully assigned kuttl-test-equal-lacewing/myapp-679f79d5f8-jvn68 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:12 +0000 UTC Normal Pod myapp-679f79d5f8-jvn68 AddedInterface Add eth0 [10.130.0.65/23] from ovn-kubernetes multus
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:12 +0000 UTC Normal Pod myapp-679f79d5f8-jvn68.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:12 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulCreate Created pod: myapp-679f79d5f8-jvn68 replicaset-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:12 +0000 UTC Normal Pod myapp-7669868dc-hsv95 Binding Scheduled Successfully assigned kuttl-test-equal-lacewing/myapp-7669868dc-hsv95 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:12 +0000 UTC Normal ReplicaSet.apps myapp-7669868dc SuccessfulCreate Created pod: myapp-7669868dc-hsv95 replicaset-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:12 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-679f79d5f8 to 1 deployment-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:12 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled up replica set myapp-7669868dc to 1 deployment-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:13 +0000 UTC Warning Pod myapp-7669868dc-hsv95 FailedMount MountVolume.SetUp failed for volume "simplest-trusted-ca" : configmap "simplest-trusted-ca" not found kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:13 +0000 UTC Warning Pod myapp-7669868dc-hsv95 FailedMount MountVolume.SetUp failed for volume "simplest-service-ca" : configmap "simplest-service-ca" not found kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:13 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq Binding Scheduled Successfully assigned kuttl-test-equal-lacewing/simplest-79c59d6495-lm7xq to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:13 +0000 UTC Normal ReplicaSet.apps simplest-79c59d6495 SuccessfulCreate Created pod: simplest-79c59d6495-lm7xq replicaset-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:13 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-79c59d6495 to 1 deployment-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:14 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq AddedInterface Add eth0 [10.130.0.67/23] from ovn-kubernetes multus
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:14 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:14 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:14 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:14 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:14 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:14 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:15 +0000 UTC Normal Pod myapp-7669868dc-hsv95 AddedInterface Add eth0 [10.130.0.66/23] from ovn-kubernetes multus
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:15 +0000 UTC Normal Pod myapp-7669868dc-hsv95.spec.containers{myapp} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod myapp-679f79d5f8-jvn68.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 5.384s (5.384s including waiting) kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod myapp-679f79d5f8-jvn68.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod myapp-679f79d5f8-jvn68.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod myapp-7669868dc-hsv95.spec.containers{myapp} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 3.233s (3.233s including waiting) kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod myapp-7669868dc-hsv95.spec.containers{myapp} Created Created container myapp kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod myapp-7669868dc-hsv95.spec.containers{myapp} Started Started container myapp kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod myapp-7669868dc-hsv95.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod myapp-7669868dc-hsv95.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod myapp-7669868dc-hsv95.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Pod simplest-79c59d6495-lm7xq.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal ReplicaSet.apps simplest-79c59d6495 SuccessfulDelete Deleted pod: simplest-79c59d6495-lm7xq replicaset-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:18 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled down replica set simplest-79c59d6495 to 0 from 1 deployment-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:19 +0000 UTC Normal ReplicaSet.apps myapp-679f79d5f8 SuccessfulDelete Deleted pod: myapp-679f79d5f8-jvn68 replicaset-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:19 +0000 UTC Normal Deployment.apps myapp ScalingReplicaSet Scaled down replica set myapp-679f79d5f8 to 0 from 1 deployment-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:19 +0000 UTC Normal Pod simplest-76c5f76997-p5v9j Binding Scheduled Successfully assigned kuttl-test-equal-lacewing/simplest-76c5f76997-p5v9j to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:19 +0000 UTC Normal ReplicaSet.apps simplest-76c5f76997 SuccessfulCreate Created pod: simplest-76c5f76997-p5v9j replicaset-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:19 +0000 UTC Normal Deployment.apps simplest ScalingReplicaSet Scaled up replica set simplest-76c5f76997 to 1 deployment-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:20 +0000 UTC Normal Pod myapp-679f79d5f8-jvn68.spec.containers{myapp} Killing Stopping container myapp kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:20 +0000 UTC Normal Pod simplest-76c5f76997-p5v9j AddedInterface Add eth0 [10.130.0.68/23] from ovn-kubernetes multus
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:20 +0000 UTC Normal Pod simplest-76c5f76997-p5v9j.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:20 +0000 UTC Normal Pod simplest-76c5f76997-p5v9j.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:20 +0000 UTC Normal Pod simplest-76c5f76997-p5v9j.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:20 +0000 UTC Normal Pod simplest-76c5f76997-p5v9j.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:20 +0000 UTC Normal Pod simplest-76c5f76997-p5v9j.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:20 +0000 UTC Normal Pod simplest-76c5f76997-p5v9j.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:25 +0000 UTC Warning Pod myapp-7669868dc-hsv95.spec.containers{myapp} Unhealthy Liveness probe failed: Get "http://10.130.0.66:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:25 +0000 UTC Normal Pod report-span-kj8c9 Binding Scheduled Successfully assigned kuttl-test-equal-lacewing/report-span-kj8c9 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:25 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-kj8c9 job-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:26 +0000 UTC Normal Pod check-span-cpf6m Binding Scheduled Successfully assigned kuttl-test-equal-lacewing/check-span-cpf6m to ip-10-0-52-194.us-west-2.compute.internal default-scheduler
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:26 +0000 UTC Normal Pod check-span-cpf6m AddedInterface Add eth0 [10.129.0.31/23] from ovn-kubernetes multus
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:26 +0000 UTC Normal Pod check-span-cpf6m.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:26 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-cpf6m job-controller
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:26 +0000 UTC Normal Pod report-span-kj8c9 AddedInterface Add eth0 [10.130.0.69/23] from ovn-kubernetes multus
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:26 +0000 UTC Normal Pod report-span-kj8c9.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:27 +0000 UTC Normal Pod report-span-kj8c9.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 807ms (807ms including waiting) kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:27 +0000 UTC Normal Pod report-span-kj8c9.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:27 +0000 UTC Normal Pod report-span-kj8c9.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:29 +0000 UTC Normal Pod check-span-cpf6m.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 2.897s (2.897s including waiting) kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:29 +0000 UTC Normal Pod check-span-cpf6m.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:29 +0000 UTC Normal Pod check-span-cpf6m.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 02:08:33 | examples-business-application-injected-sidecar | 2024-09-19 02:08:32 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 02:08:34 | examples-business-application-injected-sidecar | Deleting namespace: kuttl-test-equal-lacewing
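What this test exercised: once the simplest Jaeger instance existed, the operator rolled the myapp Deployment to add a jaeger-agent sidecar, which is why the events show a second myapp ReplicaSet whose pod pulls the jaeger-agent image alongside the application container. A sketch of the annotation that requests injection; the annotation is the operator's documented trigger, while the rest of this Deployment is illustrative only:

    # Hypothetical minimal Deployment; only the inject annotation matters here.
    cat <<'EOF' | kubectl apply -n "$NAMESPACE" -f -
    apiVersion: apps/v1
    kind: Deployment
    metadata:
      name: myapp
      annotations:
        sidecar.jaegertracing.io/inject: "true"
    spec:
      selector:
        matchLabels:
          app: myapp
      template:
        metadata:
          labels:
            app: myapp
        spec:
          containers:
          - name: myapp
            image: jaegertracing/vertx-create-span:operator-e2e-tests
    EOF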
=== CONT kuttl/harness/examples-service-types
logger.go:42: 02:08:46 | examples-service-types | Creating namespace: kuttl-test-glorious-rhino
logger.go:42: 02:08:46 | examples-service-types/0-install | starting test step 0-install
logger.go:42: 02:08:46 | examples-service-types/0-install | Jaeger:kuttl-test-glorious-rhino/service-types created
logger.go:42: 02:08:50 | examples-service-types/0-install | test step completed 0-install
logger.go:42: 02:08:50 | examples-service-types/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 02:08:50 | examples-service-types/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE service-types /dev/null]
logger.go:42: 02:08:51 | examples-service-types/1-smoke-test | Warning: resource jaegers/service-types is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 02:08:58 | examples-service-types/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://service-types-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://service-types-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 02:08:59 | examples-service-types/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 02:08:59 | examples-service-types/1-smoke-test | job.batch/report-span created
logger.go:42: 02:09:00 | examples-service-types/1-smoke-test | job.batch/check-span created
logger.go:42: 02:09:13 | examples-service-types/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 02:09:13 | examples-service-types/2- | starting test step 2-
logger.go:42: 02:09:13 | examples-service-types/2- | test step completed 2-
logger.go:42: 02:09:13 | examples-service-types | examples-service-types events from ns kuttl-test-glorious-rhino:
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:47 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w Binding Scheduled Successfully assigned kuttl-test-glorious-rhino/service-types-94fdd7bd9-xjx8w to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:47 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w AddedInterface Add eth0 [10.130.0.70/23] from ovn-kubernetes multus
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:47 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:47 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:47 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:47 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:47 +0000 UTC Normal ReplicaSet.apps service-types-94fdd7bd9 SuccessfulCreate Created pod: service-types-94fdd7bd9-xjx8w replicaset-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:47 +0000 UTC Normal Service service-types-collector EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:47 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-94fdd7bd9 to 1 deployment-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:48 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:48 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:50 +0000 UTC Normal Service service-types-collector EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:50 +0000 UTC Normal Service service-types-query EnsuringLoadBalancer Ensuring load balancer service-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:52 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:52 +0000 UTC Normal Pod service-types-94fdd7bd9-xjx8w.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:52 +0000 UTC Normal ReplicaSet.apps service-types-94fdd7bd9 SuccessfulDelete Deleted pod: service-types-94fdd7bd9-xjx8w replicaset-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:52 +0000 UTC Normal Pod service-types-fdb68975c-d5v5b Binding Scheduled Successfully assigned kuttl-test-glorious-rhino/service-types-fdb68975c-d5v5b to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:52 +0000 UTC Normal ReplicaSet.apps service-types-fdb68975c SuccessfulCreate Created pod: service-types-fdb68975c-d5v5b replicaset-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:52 +0000 UTC Normal Service service-types-query EnsuredLoadBalancer Ensured load balancer service-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:52 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled down replica set service-types-94fdd7bd9 to 0 from 1 deployment-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:52 +0000 UTC Normal Deployment.apps service-types ScalingReplicaSet Scaled up replica set service-types-fdb68975c to 1 deployment-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:53 +0000 UTC Normal Pod service-types-fdb68975c-d5v5b AddedInterface Add eth0 [10.130.0.71/23] from ovn-kubernetes multus
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:53 +0000 UTC Normal Pod service-types-fdb68975c-d5v5b.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:53 +0000 UTC Normal Pod service-types-fdb68975c-d5v5b.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:53 +0000 UTC Normal Pod service-types-fdb68975c-d5v5b.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:53 +0000 UTC Normal Pod service-types-fdb68975c-d5v5b.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:53 +0000 UTC Normal Pod service-types-fdb68975c-d5v5b.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:53 +0000 UTC Normal Pod service-types-fdb68975c-d5v5b.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:59 +0000 UTC Normal Pod report-span-mfjnh Binding Scheduled Successfully assigned kuttl-test-glorious-rhino/report-span-mfjnh to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:08:59 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-mfjnh job-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:00 +0000 UTC Normal Pod check-span-6p74k Binding Scheduled Successfully assigned kuttl-test-glorious-rhino/check-span-6p74k to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:00 +0000 UTC Normal Pod check-span-6p74k AddedInterface Add eth0 [10.130.0.73/23] from ovn-kubernetes multus
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:00 +0000 UTC Normal Pod check-span-6p74k.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:00 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-6p74k job-controller
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:00 +0000 UTC Normal Pod report-span-mfjnh AddedInterface Add eth0 [10.130.0.72/23] from ovn-kubernetes multus
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:00 +0000 UTC Normal Pod report-span-mfjnh.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:01 +0000 UTC Normal Pod check-span-6p74k.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 960ms (960ms including waiting) kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:01 +0000 UTC Normal Pod check-span-6p74k.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:01 +0000 UTC Normal Pod check-span-6p74k.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:01 +0000 UTC Normal Pod report-span-mfjnh.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 804ms (804ms including waiting) kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:01 +0000 UTC Normal Pod report-span-mfjnh.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:01 +0000 UTC Normal Pod report-span-mfjnh.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 02:09:13 | examples-service-types | 2024-09-19 02:09:11 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 02:09:13 | examples-service-types | Deleting namespace: kuttl-test-glorious-rhino
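The EnsuringLoadBalancer/EnsuredLoadBalancer events from the service-controller are the point of this example: the collector and query services are exposed with a non-default service type instead of ClusterIP. A sketch of the kind of Jaeger CR that drives this; the actual manifest is not printed in the log, and the serviceType fields are an assumption based on the upstream service-types example:

    cat <<'EOF' | kubectl apply -n "$NAMESPACE" -f -
    apiVersion: jaegertracing.io/v1
    kind: Jaeger
    metadata:
      name: service-types
    spec:
      collector:
        serviceType: LoadBalancer   # assumed; matches the collector LB events
      query:
        serviceType: LoadBalancer   # assumed; matches the query LB events
    EOF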
=== CONT kuttl/harness/examples-openshift-with-htpasswd
logger.go:42: 02:09:39 | examples-openshift-with-htpasswd | Ignoring 00-install.yaml.template as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 02:09:39 | examples-openshift-with-htpasswd | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 02:09:39 | examples-openshift-with-htpasswd | Creating namespace: kuttl-test-optimum-zebra
logger.go:42: 02:09:40 | examples-openshift-with-htpasswd/0-install | starting test step 0-install
logger.go:42: 02:09:40 | examples-openshift-with-htpasswd/0-install | Secret:kuttl-test-optimum-zebra/htpasswd created
logger.go:42: 02:09:40 | examples-openshift-with-htpasswd/0-install | test step completed 0-install
logger.go:42: 02:09:40 | examples-openshift-with-htpasswd/1-install | starting test step 1-install
logger.go:42: 02:09:40 | examples-openshift-with-htpasswd/1-install | Jaeger:kuttl-test-optimum-zebra/with-htpasswd created
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/1-install | test step completed 1-install
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | starting test step 2-check-unsecured
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [./ensure-ingress-host.sh]
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | Checking the Ingress host value was populated
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 0
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | Hostname is with-htpasswd-kuttl-test-optimum-zebra.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | running command: [sh -c INSECURE=true ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | Checking an expected HTTP response
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | Running in OpenShift
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | Not using any secret
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 1/30 the https://with-htpasswd-kuttl-test-optimum-zebra.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org/search
logger.go:42: 02:09:45 | examples-openshift-with-htpasswd/2-check-unsecured | HTTP response is 503. 403 expected. Waiting 10 s
logger.go:42: 02:09:55 | examples-openshift-with-htpasswd/2-check-unsecured | Try number 2/30 the https://with-htpasswd-kuttl-test-optimum-zebra.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org/search
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/2-check-unsecured | curl response asserted properly
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/2-check-unsecured | test step completed 2-check-unsecured
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | starting test step 3-check-unauthorized
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking the Ingress host value was populated
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 0
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | Hostname is with-htpasswd-kuttl-test-optimum-zebra.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | running command: [sh -c JAEGER_USERNAME=wronguser JAEGER_PASSWORD=wrongpassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 403 true $NAMESPACE with-htpasswd]
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | Checking an expected HTTP response
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | Running in OpenShift
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | Using Jaeger basic authentication
logger.go:42: 02:09:56 | examples-openshift-with-htpasswd/3-check-unauthorized | Try number 1/30 the https://with-htpasswd-kuttl-test-optimum-zebra.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org/search
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/3-check-unauthorized | curl response asserted properly
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/3-check-unauthorized | test step completed 3-check-unauthorized
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | starting test step 4-check-authorized
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | running command: [./ensure-ingress-host.sh]
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | Checking the Ingress host value was populated
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | Try number 0
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | Hostname is with-htpasswd-kuttl-test-optimum-zebra.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | running command: [sh -c JAEGER_USERNAME=awesomeuser JAEGER_PASSWORD=awesomepassword ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE with-htpasswd]
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | Checking an expected HTTP response
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | Running in OpenShift
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | Using Jaeger basic authentication
logger.go:42: 02:09:57 | examples-openshift-with-htpasswd/4-check-authorized | Try number 1/30 the https://with-htpasswd-kuttl-test-optimum-zebra.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org/search
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd/4-check-authorized | curl response asserted properly
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd/4-check-authorized | test step completed 4-check-authorized
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | examples-openshift-with-htpasswd events from ns kuttl-test-optimum-zebra:
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:41 +0000 UTC Normal Pod with-htpasswd-79d695ff64-92dbh Binding Scheduled Successfully assigned kuttl-test-optimum-zebra/with-htpasswd-79d695ff64-92dbh to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:41 +0000 UTC Warning Pod with-htpasswd-79d695ff64-92dbh FailedMount MountVolume.SetUp failed for volume "with-htpasswd-ui-oauth-proxy-tls" : secret "with-htpasswd-ui-oauth-proxy-tls" not found kubelet
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:41 +0000 UTC Normal ReplicaSet.apps with-htpasswd-79d695ff64 SuccessfulCreate Created pod: with-htpasswd-79d695ff64-92dbh replicaset-controller
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:41 +0000 UTC Normal Deployment.apps with-htpasswd ScalingReplicaSet Scaled up replica set with-htpasswd-79d695ff64 to 1 deployment-controller
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:42 +0000 UTC Normal Pod with-htpasswd-79d695ff64-92dbh AddedInterface Add eth0 [10.130.0.74/23] from ovn-kubernetes multus
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:42 +0000 UTC Normal Pod with-htpasswd-79d695ff64-92dbh.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:42 +0000 UTC Normal Pod with-htpasswd-79d695ff64-92dbh.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:42 +0000 UTC Normal Pod with-htpasswd-79d695ff64-92dbh.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:42 +0000 UTC Normal Pod with-htpasswd-79d695ff64-92dbh.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:42 +0000 UTC Normal Pod with-htpasswd-79d695ff64-92dbh.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | 2024-09-19 02:09:42 +0000 UTC Normal Pod with-htpasswd-79d695ff64-92dbh.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 02:09:58 | examples-openshift-with-htpasswd | Deleting namespace: kuttl-test-optimum-zebra
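The three check steps reduce to plain HTTP assertions against the route: anonymous and wrong-credential requests must be rejected by the oauth-proxy with 403, and the htpasswd user must get through with 200. A hand-rolled equivalent of what assert-jaeger-http-code.sh loops on, with the host and credentials copied from the log:

    HOST=with-htpasswd-kuttl-test-optimum-zebra.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org
    # no credentials: expect 403
    curl -k -s -o /dev/null -w '%{http_code}\n' "https://$HOST/search"
    # wrong credentials: expect 403
    curl -k -s -o /dev/null -w '%{http_code}\n' -u wronguser:wrongpassword "https://$HOST/search"
    # valid htpasswd user: expect 200
    curl -k -s -o /dev/null -w '%{http_code}\n' -u awesomeuser:awesomepassword "https://$HOST/search"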
=== CONT kuttl/harness/examples-all-in-one-with-options
logger.go:42: 02:10:04 | examples-all-in-one-with-options | Creating namespace: kuttl-test-sweet-kit
logger.go:42: 02:10:04 | examples-all-in-one-with-options/0-install | starting test step 0-install
logger.go:42: 02:10:04 | examples-all-in-one-with-options/0-install | Jaeger:kuttl-test-sweet-kit/my-jaeger created
logger.go:42: 02:10:07 | examples-all-in-one-with-options/0-install | test step completed 0-install
logger.go:42: 02:10:07 | examples-all-in-one-with-options/1-smoke-test | starting test step 1-smoke-test
logger.go:42: 02:10:07 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE my-jaeger /dev/null]
logger.go:42: 02:10:09 | examples-all-in-one-with-options/1-smoke-test | Warning: resource jaegers/my-jaeger is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
logger.go:42: 02:10:16 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://my-jaeger-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://my-jaeger-query:443/jaeger MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml]
logger.go:42: 02:10:16 | examples-all-in-one-with-options/1-smoke-test | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE]
logger.go:42: 02:10:17 | examples-all-in-one-with-options/1-smoke-test | job.batch/report-span created
logger.go:42: 02:10:17 | examples-all-in-one-with-options/1-smoke-test | job.batch/check-span created
logger.go:42: 02:10:29 | examples-all-in-one-with-options/1-smoke-test | test step completed 1-smoke-test
logger.go:42: 02:10:29 | examples-all-in-one-with-options | examples-all-in-one-with-options events from ns kuttl-test-sweet-kit:
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:05 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8 Binding Scheduled Successfully assigned kuttl-test-sweet-kit/my-jaeger-6877948f48-gwbt8 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:05 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8 AddedInterface Add eth0 [10.130.0.76/23] from ovn-kubernetes multus
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:05 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:05 +0000 UTC Normal ReplicaSet.apps my-jaeger-6877948f48 SuccessfulCreate Created pod: my-jaeger-6877948f48-gwbt8 replicaset-controller
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:05 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-6877948f48 to 1 deployment-controller
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:06 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:06 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:06 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:06 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:06 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:09 +0000 UTC Normal ReplicaSet.apps my-jaeger-6877948f48 SuccessfulDelete Deleted pod: my-jaeger-6877948f48-gwbt8 replicaset-controller
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:09 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled down replica set my-jaeger-6877948f48 to 0 from 1 deployment-controller
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:10 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:10 +0000 UTC Normal Pod my-jaeger-6877948f48-gwbt8.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal Pod my-jaeger-d9977d9b4-s7c9v Binding Scheduled Successfully assigned kuttl-test-sweet-kit/my-jaeger-d9977d9b4-s7c9v to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal Pod my-jaeger-d9977d9b4-s7c9v AddedInterface Add eth0 [10.130.0.77/23] from ovn-kubernetes multus
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal Pod my-jaeger-d9977d9b4-s7c9v.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal Pod my-jaeger-d9977d9b4-s7c9v.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal Pod my-jaeger-d9977d9b4-s7c9v.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal Pod my-jaeger-d9977d9b4-s7c9v.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal Pod my-jaeger-d9977d9b4-s7c9v.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal Pod my-jaeger-d9977d9b4-s7c9v.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal ReplicaSet.apps my-jaeger-d9977d9b4 SuccessfulCreate Created pod: my-jaeger-d9977d9b4-s7c9v replicaset-controller
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:11 +0000 UTC Normal Deployment.apps my-jaeger ScalingReplicaSet Scaled up replica set my-jaeger-d9977d9b4 to 1 deployment-controller
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:17 +0000 UTC Normal Pod check-span-cltjp Binding Scheduled Successfully assigned kuttl-test-sweet-kit/check-span-cltjp to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:17 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-cltjp job-controller
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:17 +0000 UTC Normal Pod report-span-xrgwh Binding Scheduled Successfully assigned kuttl-test-sweet-kit/report-span-xrgwh to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:17 +0000 UTC Normal Pod report-span-xrgwh AddedInterface Add eth0 [10.130.0.78/23] from ovn-kubernetes multus
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:17 +0000 UTC Normal Pod report-span-xrgwh.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:17 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-xrgwh job-controller
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:18 +0000 UTC Normal Pod check-span-cltjp AddedInterface Add eth0 [10.130.0.79/23] from ovn-kubernetes multus
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:18 +0000 UTC Normal Pod check-span-cltjp.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:18 +0000 UTC Normal Pod check-span-cltjp.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 821ms (821ms including waiting) kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:18 +0000 UTC Normal Pod report-span-xrgwh.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 823ms (823ms including waiting) kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:18 +0000 UTC Normal Pod report-span-xrgwh.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:18 +0000 UTC Normal Pod report-span-xrgwh.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:19 +0000 UTC Normal Pod check-span-cltjp.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:19 +0000 UTC Normal Pod check-span-cltjp.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 02:10:29 | examples-all-in-one-with-options | 2024-09-19 02:10:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 02:10:29 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-sweet-kit
examples-all-in-one-with-options | 2024-09-19 02:10:28 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller logger.go:42: 02:10:29 | examples-all-in-one-with-options | Deleting namespace: kuttl-test-sweet-kit === CONT kuttl/harness/examples-agent-with-priority-class logger.go:42: 02:10:41 | examples-agent-with-priority-class | Creating namespace: kuttl-test-measured-yeti logger.go:42: 02:10:41 | examples-agent-with-priority-class/0-install | starting test step 0-install logger.go:42: 02:10:41 | examples-agent-with-priority-class/0-install | SecurityContextConstraints:/daemonset-with-hostport created logger.go:42: 02:10:42 | examples-agent-with-priority-class/0-install | ServiceAccount:kuttl-test-measured-yeti/jaeger-agent-daemonset created logger.go:42: 02:10:42 | examples-agent-with-priority-class/0-install | test step completed 0-install logger.go:42: 02:10:42 | examples-agent-with-priority-class/1-add-policy | starting test step 1-add-policy logger.go:42: 02:10:42 | examples-agent-with-priority-class/1-add-policy | running command: [sh -c oc adm policy --namespace $NAMESPACE add-scc-to-user daemonset-with-hostport -z jaeger-agent-daemonset] logger.go:42: 02:10:42 | examples-agent-with-priority-class/1-add-policy | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:daemonset-with-hostport added: "jaeger-agent-daemonset" logger.go:42: 02:10:42 | examples-agent-with-priority-class/1-add-policy | running command: [sh -c sleep 5] logger.go:42: 02:10:47 | examples-agent-with-priority-class/1-add-policy | test step completed 1-add-policy logger.go:42: 02:10:47 | examples-agent-with-priority-class/2-install | starting test step 2-install logger.go:42: 02:10:47 | examples-agent-with-priority-class/2-install | running command: [sh -c SERVICE_ACCOUNT_NAME=e2e-test ../../../../cmd-utils/get-token.sh $NAMESPACE agent-as-daemonset /dev/null] logger.go:42: 02:10:56 | examples-agent-with-priority-class/2-install | running command: [sh -c ASSERT_IMG=quay.io/rhn_support_ikanse/jaeger-asserts:latest JAEGER_COLLECTOR_ENDPOINT=http://agent-as-daemonset-collector-headless:14268 JAEGER_QUERY_ENDPOINT=https://agent-as-daemonset-query:443 MOUNT_SECRET=e2e-test /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test.yaml.template -o smoke-test-job.yaml] logger.go:42: 02:10:57 | examples-agent-with-priority-class/2-install | running command: [sh -c kubectl apply -f smoke-test-job.yaml -n $NAMESPACE] logger.go:42: 02:10:57 | examples-agent-with-priority-class/2-install | job.batch/report-span created logger.go:42: 02:10:57 | examples-agent-with-priority-class/2-install | job.batch/check-span created logger.go:42: 02:10:58 | examples-agent-with-priority-class/2-install | PriorityClass:/high-priority created logger.go:42: 02:10:58 | examples-agent-with-priority-class/2-install | Jaeger:kuttl-test-measured-yeti/agent-as-daemonset updated logger.go:42: 02:11:09 | examples-agent-with-priority-class/2-install | test step completed 2-install logger.go:42: 02:11:09 | examples-agent-with-priority-class | examples-agent-with-priority-class events from ns kuttl-test-measured-yeti: logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:49 +0000 UTC Normal Deployment.apps agent-as-daemonset ScalingReplicaSet Scaled up replica set agent-as-daemonset-b5d7b5d to 1 deployment-controller logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:50 +0000 UTC Normal Pod agent-as-daemonset-b5d7b5d-5tg9s Binding Scheduled Successfully 
assigned kuttl-test-measured-yeti/agent-as-daemonset-b5d7b5d-5tg9s to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:50 +0000 UTC Normal Pod agent-as-daemonset-b5d7b5d-5tg9s AddedInterface Add eth0 [10.130.0.80/23] from ovn-kubernetes multus logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:50 +0000 UTC Normal Pod agent-as-daemonset-b5d7b5d-5tg9s.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:50 +0000 UTC Normal Pod agent-as-daemonset-b5d7b5d-5tg9s.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:50 +0000 UTC Normal Pod agent-as-daemonset-b5d7b5d-5tg9s.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:50 +0000 UTC Normal Pod agent-as-daemonset-b5d7b5d-5tg9s.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:50 +0000 UTC Normal Pod agent-as-daemonset-b5d7b5d-5tg9s.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:50 +0000 UTC Normal Pod agent-as-daemonset-b5d7b5d-5tg9s.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:50 +0000 UTC Normal ReplicaSet.apps agent-as-daemonset-b5d7b5d SuccessfulCreate Created pod: agent-as-daemonset-b5d7b5d-5tg9s replicaset-controller logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:57 +0000 UTC Normal Pod check-span-bhhxh Binding Scheduled Successfully assigned kuttl-test-measured-yeti/check-span-bhhxh to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:57 +0000 UTC Normal Job.batch check-span SuccessfulCreate Created pod: check-span-bhhxh job-controller logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:57 +0000 UTC Normal Pod report-span-gj7cz Binding Scheduled Successfully assigned kuttl-test-measured-yeti/report-span-gj7cz to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:57 +0000 UTC Normal Pod report-span-gj7cz AddedInterface Add eth0 [10.130.0.81/23] from ovn-kubernetes multus logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:57 +0000 UTC Normal Pod report-span-gj7cz.spec.containers{report-span} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:57 +0000 UTC Normal Job.batch report-span SuccessfulCreate Created pod: report-span-gj7cz job-controller logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:58 +0000 UTC Warning DaemonSet.apps agent-as-daemonset-agent-daemonset FailedCreate Error 
creating: pods "agent-as-daemonset-agent-daemonset-" is forbidden: unable to validate against any security context constraint: [provider "anyuid": Forbidden: not usable by user or serviceaccount, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5775: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 5778: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6831: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 6832: Host ports are not allowed to be used, provider restricted-v2: .containers[0].containers[0].hostPort: Invalid value: 14271: Host ports are not allowed to be used, provider "restricted": Forbidden: not usable by user or serviceaccount, provider "nonroot-v2": Forbidden: not usable by user or serviceaccount, provider "nonroot": Forbidden: not usable by user or serviceaccount, provider "pcap-dedicated-admins": Forbidden: not usable by user or serviceaccount, provider "hostmount-anyuid": Forbidden: not usable by user or serviceaccount, provider "daemonset-with-hostport": Forbidden: not usable by user or serviceaccount, provider "hostnetwork-v2": Forbidden: not usable by user or serviceaccount, provider "hostnetwork": Forbidden: not usable by user or serviceaccount, provider "hostaccess": Forbidden: not usable by user or serviceaccount, provider "node-exporter": Forbidden: not usable by user or serviceaccount, provider "privileged": Forbidden: not usable by user or serviceaccount] daemonset-controller
logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:58 +0000 UTC Normal Pod check-span-bhhxh AddedInterface Add eth0 [10.130.0.82/23] from ovn-kubernetes multus
logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:58 +0000 UTC Normal Pod check-span-bhhxh.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:58 +0000 UTC Normal Pod check-span-bhhxh.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 873ms (873ms including waiting) kubelet
logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:58 +0000 UTC Normal Pod report-span-gj7cz.spec.containers{report-span} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 874ms (874ms including waiting) kubelet
logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:58 +0000 UTC Normal Pod report-span-gj7cz.spec.containers{report-span} Created Created container report-span kubelet
logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:58 +0000 UTC Normal Pod report-span-gj7cz.spec.containers{report-span} Started Started container report-span kubelet
logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:59 +0000 UTC Normal Pod check-span-bhhxh.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:10:59 +0000 UTC Normal Pod check-span-bhhxh.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 02:11:09 | examples-agent-with-priority-class | 2024-09-19 02:11:09 +0000 UTC Normal Job.batch check-span Completed Job completed job-controller
logger.go:42: 02:11:10 | examples-agent-with-priority-class | Deleting namespace: kuttl-test-measured-yeti
=== CONT  kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- FAIL: kuttl (1813.85s)
--- FAIL: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (6.00s)
--- FAIL: kuttl/harness/examples-simple-prod (607.47s)
--- PASS: kuttl/harness/examples-with-sampling (244.28s)
--- PASS: kuttl/harness/examples-with-cassandra (73.79s)
--- PASS: kuttl/harness/examples-with-badger (40.09s)
--- PASS: kuttl/harness/examples-simplest (38.04s)
--- FAIL: kuttl/harness/examples-simple-prod-with-volumes (607.53s)
--- PASS: kuttl/harness/examples-business-application-injected-sidecar (34.44s)
--- PASS: kuttl/harness/examples-service-types (53.72s)
--- PASS: kuttl/harness/examples-openshift-with-htpasswd (24.58s)
--- PASS: kuttl/harness/examples-all-in-one-with-options (36.95s)
--- PASS: kuttl/harness/examples-agent-with-priority-class (46.33s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name examples --report --output /logs/artifacts/examples.xml ./artifacts/kuttl-report.xml
time="2024-09-19T02:11:29Z" level=debug msg="Setting a new name for the test suites"
time="2024-09-19T02:11:29Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-09-19T02:11:29Z" level=debug msg="normalizing test case names"
time="2024-09-19T02:11:29Z" level=debug msg="examples/artifacts -> examples_artifacts"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-simple-prod -> examples_examples_simple_prod"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-with-sampling -> examples_examples_with_sampling"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-with-cassandra -> examples_examples_with_cassandra"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-with-badger -> examples_examples_with_badger"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-simplest -> examples_examples_simplest"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-simple-prod-with-volumes -> examples_examples_simple_prod_with_volumes"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-business-application-injected-sidecar -> examples_examples_business_application_injected_sidecar"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-service-types -> examples_examples_service_types"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-openshift-with-htpasswd -> examples_examples_openshift_with_htpasswd"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-all-in-one-with-options -> examples_examples_all_in_one_with_options"
time="2024-09-19T02:11:29Z" level=debug msg="examples/examples-agent-with-priority-class -> examples_examples_agent_with_priority_class"
+---------------------------------------------------------+--------+
|                           NAME                          | RESULT |
+---------------------------------------------------------+--------+
| examples_artifacts                                      | passed |
| examples_examples_simple_prod                           | failed |
| examples_examples_with_sampling                         | passed |
| examples_examples_with_cassandra                        | passed |
| examples_examples_with_badger                           | passed |
| examples_examples_simplest                              | passed |
| examples_examples_simple_prod_with_volumes              | failed |
| examples_examples_business_application_injected_sidecar | passed |
| examples_examples_service_types                         | passed |
| examples_examples_openshift_with_htpasswd               | passed |
| examples_examples_all_in_one_with_options               | passed |
| examples_examples_agent_with_priority_class             | passed |
+---------------------------------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh generate false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=generate
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/generate.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-generate
make[2]: Entering directory '/tmp/jaeger-tests'
test -s /tmp/jaeger-tests/bin/operator-sdk || curl -sLo /tmp/jaeger-tests/bin/operator-sdk https://github.com/operator-framework/operator-sdk/releases/download/v1.32.0/operator-sdk_`go env GOOS`_`go env GOARCH`
./hack/install/install-golangci-lint.sh
Installing golangci-lint
golangci-lint 1.55.2 is installed already
./hack/install/install-goimports.sh
Installing goimports
Try 0... go install golang.org/x/tools/cmd/goimports@v0.1.12
>>>> Formatting code...
./.ci/format.sh
>>>> Building...
./hack/install/install-dependencies.sh
Installing go dependencies
Try 0... go mod download
GOOS= GOARCH= CGO_ENABLED=0 GO111MODULE=on go build -ldflags "-X "github.com/jaegertracing/jaeger-operator/pkg/version".version="1.61.0" -X "github.com/jaegertracing/jaeger-operator/pkg/version".buildDate=2024-09-19T02:11:31Z -X "github.com/jaegertracing/jaeger-operator/pkg/version".defaultJaeger="1.61.0"" -o "bin/jaeger-operator" main.go
JAEGER_VERSION="1.61.0" ./tests/e2e/generate/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 86m Cluster version is 4.16.11'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 86m Cluster version is 4.16.11' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/generate/render.sh ++ export SUITE_DIR=./tests/e2e/generate ++ SUITE_DIR=./tests/e2e/generate ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/generate ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ '[' true = true ']'
+ skip_test generate 'This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed'
+ '[' 2 -ne 2 ']'
+ test_name=generate
+ message='This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed'
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/generate/_build
+ '[' _build '!=' _build ']'
+ rm -rf generate
+ warning 'generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;33mWAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed\e[0m'
WAR: generate: This test was skipped until https://github.com/jaegertracing/jaeger-operator/issues/2145 is fixed
make[2]: Leaving directory '/tmp/jaeger-tests'
+ echo 'Running generate E2E tests'
Running generate E2E tests
+ cd tests/e2e/generate/_build
+ set +e
+ KUBECONFIG=/tmp/kubeconfig-1829818249
+ /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml
=== RUN   kuttl
harness.go:462: starting setup
harness.go:252: running tests using configured kubeconfig.
harness.go:275: Successful connection to cluster at: https://api.ci-rosa-h-a4mj.qd6c.s3.devshift.org:443
harness.go:360: running tests
harness.go:73: going to run test suite with timeout of 600 seconds for each step
harness.go:372: testsuite: . has 1 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT  kuttl/harness/artifacts
logger.go:42: 02:11:48 | artifacts | Creating namespace: kuttl-test-whole-raptor
logger.go:42: 02:11:48 | artifacts | artifacts events from ns kuttl-test-whole-raptor:
logger.go:42: 02:11:48 | artifacts | Deleting namespace: kuttl-test-whole-raptor
=== CONT  kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (6.67s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (6.09s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name generate --report --output /logs/artifacts/generate.xml ./artifacts/kuttl-report.xml
time="2024-09-19T02:11:54Z" level=debug msg="Setting a new name for the test suites"
time="2024-09-19T02:11:54Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-09-19T02:11:54Z" level=debug msg="normalizing test case names"
time="2024-09-19T02:11:54Z" level=debug msg="generate/artifacts -> generate_artifacts"
+--------------------+--------+
|        NAME        | RESULT |
+--------------------+--------+
| generate_artifacts | passed |
+--------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh upgrade false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=upgrade
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
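Every suite in this run goes through the same driver visible in the traces above: ./hack/run-e2e-test-suite.sh renders the kuttl assets with gomplate into a _build directory, runs kubectl-kuttl against it, and converts the resulting kuttl report with junitcli. A minimal sketch of replaying a single suite by hand, assuming the same checkout under /tmp/jaeger-tests and the kubeconfig used in this run:

    cd /tmp/jaeger-tests
    make render-e2e-tests-upgrade          # regenerates tests/e2e/upgrade/_build via gomplate
    cd tests/e2e/upgrade/_build
    KUBECONFIG=/tmp/kubeconfig-1829818249 /tmp/jaeger-tests/bin/kubectl-kuttl test --report xml
    junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml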
make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/upgrade.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-upgrade make[2]: Entering directory '/tmp/jaeger-tests' make docker JAEGER_VERSION=1.61.1 IMG="quay.io//jaeger-operator:next" make[3]: Entering directory '/tmp/jaeger-tests' [ ! -z "true" ] || docker build --build-arg=GOPROXY= --build-arg=VERSION="1.61.0" --build-arg=JAEGER_VERSION=1.61.1 --build-arg=TARGETARCH= --build-arg VERSION_DATE=2024-09-19T02:11:54Z --build-arg VERSION_PKG="github.com/jaegertracing/jaeger-operator/pkg/version" -t "quay.io//jaeger-operator:next" . make[3]: Leaving directory '/tmp/jaeger-tests' touch build-e2e-upgrade-image SKIP_ES_EXTERNAL=true IMG=quay.io//jaeger-operator:"1.61.0" JAEGER_OPERATOR_VERSION="1.61.0" JAEGER_VERSION="1.61.0" ./tests/e2e/upgrade/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 86m Cluster version is 4.16.11' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 86m Cluster version is 4.16.11' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/upgrade/render.sh ++ export SUITE_DIR=./tests/e2e/upgrade ++ SUITE_DIR=./tests/e2e/upgrade ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. 
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/upgrade ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + export JAEGER_NAME + '[' true = true ']' + skip_test upgrade 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=upgrade + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build + '[' _build '!=' _build ']' + rm -rf upgrade + warning 'upgrade: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: upgrade: Test not supported in OpenShift\e[0m' WAR: upgrade: Test not supported in OpenShift + '[' true = true ']' + skip_test upgrade-from-latest-release 'Test not supported in OpenShift' + '[' 2 -ne 2 ']' + test_name=upgrade-from-latest-release + message='Test not supported in OpenShift' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/upgrade/_build + '[' _build '!=' _build ']' + rm -rf upgrade-from-latest-release + warning 'upgrade-from-latest-release: Test not supported in OpenShift' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: upgrade-from-latest-release: Test not supported in OpenShift\e[0m' WAR: upgrade-from-latest-release: Test not supported in OpenShift make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running upgrade E2E tests' Running upgrade E2E tests + cd tests/e2e/upgrade/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1829818249 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-rosa-h-a4mj.qd6c.s3.devshift.org:443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 1 tests
=== RUN   kuttl/harness
=== RUN   kuttl/harness/artifacts
=== PAUSE kuttl/harness/artifacts
=== CONT  kuttl/harness/artifacts
logger.go:42: 02:11:56 | artifacts | Creating namespace: kuttl-test-concrete-horse
logger.go:42: 02:11:56 | artifacts | artifacts events from ns kuttl-test-concrete-horse:
logger.go:42: 02:11:56 | artifacts | Deleting namespace: kuttl-test-concrete-horse
=== CONT  kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (6.77s)
--- PASS: kuttl/harness (0.00s)
--- PASS: kuttl/harness/artifacts (6.13s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name upgrade --report --output /logs/artifacts/upgrade.xml ./artifacts/kuttl-report.xml
time="2024-09-19T02:12:02Z" level=debug msg="Setting a new name for the test suites"
time="2024-09-19T02:12:02Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-09-19T02:12:02Z" level=debug msg="normalizing test case names"
time="2024-09-19T02:12:02Z" level=debug msg="upgrade/artifacts -> upgrade_artifacts"
+-------------------+--------+
|       NAME        | RESULT |
+-------------------+--------+
| upgrade_artifacts | passed |
+-------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh sidecar false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=sidecar
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/sidecar.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-sidecar
make[2]: Entering directory '/tmp/jaeger-tests'
./tests/e2e/sidecar/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 86m Cluster version is 4.16.11'
++ IS_OPENSHIFT=false
++ '[' '!'
-z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 86m Cluster version is 4.16.11' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/sidecar/render.sh ++ export SUITE_DIR=./tests/e2e/sidecar ++ SUITE_DIR=./tests/e2e/sidecar ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../.. ++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/sidecar ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' 
-wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + jaeger_service_name=order + start_test sidecar-deployment + '[' 1 -ne 1 ']' + test_name=sidecar-deployment + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-deployment' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-deployment\e[0m' Rendering files for test sidecar-deployment + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build + '[' _build '!=' _build ']' + mkdir -p sidecar-deployment + cd sidecar-deployment + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-namespace + '[' 1 -ne 1 ']' + test_name=sidecar-namespace + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-namespace' + '[' 1 -ne 1 ']' + 
echo -e '\e[1;34mRendering files for test sidecar-namespace\e[0m' Rendering files for test sidecar-namespace + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-deployment + '[' sidecar-deployment '!=' _build ']' + cd .. + mkdir -p sidecar-namespace + cd sidecar-namespace + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml + render_find_service agent-as-sidecar allInOne order 00 03 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar + deployment_strategy=allInOne + service_name=order + job_number=00 + test_step=03 + export JAEGER_NAME=agent-as-sidecar + JAEGER_NAME=agent-as-sidecar + export JOB_NUMBER=00 + JOB_NUMBER=00 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + render_find_service agent-as-sidecar2 allInOne order 01 06 + '[' 5 -ne 5 ']' + jaeger=agent-as-sidecar2 + deployment_strategy=allInOne + service_name=order + job_number=01 + test_step=06 + export JAEGER_NAME=agent-as-sidecar2 + JAEGER_NAME=agent-as-sidecar2 + export JOB_NUMBER=01 + JOB_NUMBER=01 + export SERVICE_NAME=order + SERVICE_NAME=order + export JAEGER_QUERY_ENDPOINT + '[' true = true ']' + '[' allInOne '!=' allInOne ']' + template=/tmp/jaeger-tests/tests/templates/find-service.yaml.template + JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar2-query:16686 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./06-find-service.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./06-assert.yaml + unset JAEGER_NAME + unset SERVICE_NAME + unset JOB_NUMBER + unset JAEGER_COLLECTOR_ENDPOINT + start_test sidecar-skip-webhook + '[' 1 -ne 1 ']' + test_name=sidecar-skip-webhook + echo =========================================================================== =========================================================================== + info 'Rendering files for test sidecar-skip-webhook' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test sidecar-skip-webhook\e[0m' Rendering files for test sidecar-skip-webhook + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/sidecar/_build/sidecar-namespace + '[' sidecar-namespace '!=' _build ']' + cd .. 
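The render_find_service calls traced above are plain gomplate invocations parameterized through environment variables: the helper exports the Jaeger instance name, job number, service name and query endpoint, then fills in the find-service and assert templates. A sketch of the equivalent manual rendering, assuming the template paths from this checkout:

    export JAEGER_NAME=agent-as-sidecar JOB_NUMBER=00 SERVICE_NAME=order
    export JAEGER_QUERY_ENDPOINT=http://agent-as-sidecar-query:16686
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/find-service.yaml.template -o ./03-find-service.yaml
    /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-find-service.yaml.template -o ./03-assert.yaml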
+ mkdir -p sidecar-skip-webhook + cd sidecar-skip-webhook + render_install_vertx 01 + '[' 1 -ne 1 ']' + test_step=01 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/vertex-assert.yaml.template -o ./01-assert.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running sidecar E2E tests' Running sidecar E2E tests + cd tests/e2e/sidecar/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1829818249 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-rosa-h-a4mj.qd6c.s3.devshift.org:443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . has 4 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/sidecar-deployment === PAUSE kuttl/harness/sidecar-deployment === RUN kuttl/harness/sidecar-namespace === PAUSE kuttl/harness/sidecar-namespace === RUN kuttl/harness/sidecar-skip-webhook === PAUSE kuttl/harness/sidecar-skip-webhook === CONT kuttl/harness/artifacts logger.go:42: 02:12:11 | artifacts | Creating namespace: kuttl-test-factual-snake logger.go:42: 02:12:11 | artifacts | artifacts events from ns kuttl-test-factual-snake: logger.go:42: 02:12:11 | artifacts | Deleting namespace: kuttl-test-factual-snake === CONT kuttl/harness/sidecar-namespace logger.go:42: 02:12:17 | sidecar-namespace | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 02:12:17 | sidecar-namespace | Creating namespace: kuttl-test-whole-hamster logger.go:42: 02:12:17 | sidecar-namespace/0-install | starting test step 0-install logger.go:42: 02:12:17 | sidecar-namespace/0-install | Jaeger:kuttl-test-whole-hamster/agent-as-sidecar created logger.go:42: 02:12:21 | sidecar-namespace/0-install | test step completed 0-install logger.go:42: 02:12:21 | sidecar-namespace/1-install | starting test step 1-install logger.go:42: 02:12:21 | sidecar-namespace/1-install | Deployment:kuttl-test-whole-hamster/vertx-create-span-sidecar created logger.go:42: 02:12:22 | sidecar-namespace/1-install | test step completed 1-install logger.go:42: 02:12:22 | sidecar-namespace/2-enable-injection | starting test step 2-enable-injection logger.go:42: 02:12:22 | sidecar-namespace/2-enable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="true"] logger.go:42: 02:12:23 | sidecar-namespace/2-enable-injection | namespace/kuttl-test-whole-hamster annotated logger.go:42: 02:12:24 | sidecar-namespace/2-enable-injection | test step completed 2-enable-injection logger.go:42: 02:12:24 | sidecar-namespace/3-find-service | starting test step 3-find-service logger.go:42: 02:12:24 | sidecar-namespace/3-find-service | Job:kuttl-test-whole-hamster/00-find-service created logger.go:42: 02:12:36 | sidecar-namespace/3-find-service | test step completed 3-find-service logger.go:42: 02:12:36 | sidecar-namespace/4-other-instance | starting test step 4-other-instance logger.go:42: 02:12:36 | sidecar-namespace/4-other-instance | Jaeger:kuttl-test-whole-hamster/agent-as-sidecar2 created logger.go:42: 02:12:40 | sidecar-namespace/4-other-instance | test step completed 4-other-instance 
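The enable/disable steps of the sidecar tests below drive injection with nothing more than a namespace annotation: the operator watches sidecar.jaegertracing.io/inject and rolls the deployment to add or remove the jaeger-agent container, which is the ReplicaSet churn visible in the events that follow. A sketch against a hypothetical namespace my-app:

    kubectl annotate --overwrite namespace my-app sidecar.jaegertracing.io/inject=true
    # after the rollout, application pods should list jaeger-agent next to the app container:
    kubectl get pods -n my-app -o jsonpath='{range .items[*]}{.metadata.name}{": "}{.spec.containers[*].name}{"\n"}{end}'
    # setting the annotation back to "false" removes the sidecar again:
    kubectl annotate --overwrite namespace my-app sidecar.jaegertracing.io/inject=false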
logger.go:42: 02:12:40 | sidecar-namespace/5-delete-first-instance | starting test step 5-delete-first-instance logger.go:42: 02:12:40 | sidecar-namespace/5-delete-first-instance | test step completed 5-delete-first-instance logger.go:42: 02:12:40 | sidecar-namespace/6-find-service | starting test step 6-find-service logger.go:42: 02:12:41 | sidecar-namespace/6-find-service | Job:kuttl-test-whole-hamster/01-find-service created logger.go:42: 02:13:01 | sidecar-namespace/6-find-service | test step completed 6-find-service logger.go:42: 02:13:01 | sidecar-namespace/7-disable-injection | starting test step 7-disable-injection logger.go:42: 02:13:01 | sidecar-namespace/7-disable-injection | running command: [sh -c kubectl annotate --overwrite namespaces $NAMESPACE "sidecar.jaegertracing.io/inject"="false"] logger.go:42: 02:13:01 | sidecar-namespace/7-disable-injection | namespace/kuttl-test-whole-hamster annotated logger.go:42: 02:13:03 | sidecar-namespace/7-disable-injection | test step completed 7-disable-injection logger.go:42: 02:13:03 | sidecar-namespace | sidecar-namespace events from ns kuttl-test-whole-hamster: logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:18 +0000 UTC Normal Pod agent-as-sidecar-79694758c8-lkg5s Binding Scheduled Successfully assigned kuttl-test-whole-hamster/agent-as-sidecar-79694758c8-lkg5s to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:18 +0000 UTC Normal Pod agent-as-sidecar-79694758c8-lkg5s AddedInterface Add eth0 [10.130.0.83/23] from ovn-kubernetes multus logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:18 +0000 UTC Normal Pod agent-as-sidecar-79694758c8-lkg5s.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:18 +0000 UTC Normal Pod agent-as-sidecar-79694758c8-lkg5s.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:18 +0000 UTC Normal Pod agent-as-sidecar-79694758c8-lkg5s.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:18 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-79694758c8 SuccessfulCreate Created pod: agent-as-sidecar-79694758c8-lkg5s replicaset-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:18 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-79694758c8 to 1 deployment-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:21 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-bw44p Binding Scheduled Successfully assigned kuttl-test-whole-hamster/vertx-create-span-sidecar-84d458b68c-bw44p to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:21 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-bw44p replicaset-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:21 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:22 +0000 UTC 
Normal Pod vertx-create-span-sidecar-84d458b68c-bw44p AddedInterface Add eth0 [10.130.0.84/23] from ovn-kubernetes multus
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:22 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:22 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:22 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal Pod vertx-create-span-sidecar-7ff7788d48-ht5dz Binding Scheduled Successfully assigned kuttl-test-whole-hamster/vertx-create-span-sidecar-7ff7788d48-ht5dz to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal Pod vertx-create-span-sidecar-7ff7788d48-ht5dz AddedInterface Add eth0 [10.130.0.85/23] from ovn-kubernetes multus
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7ff7788d48 SuccessfulCreate Created pod: vertx-create-span-sidecar-7ff7788d48-ht5dz replicaset-controller
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:23 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7ff7788d48 to 1 deployment-controller
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:24 +0000 UTC Normal Pod 00-find-service-tn5fz Binding Scheduled Successfully assigned kuttl-test-whole-hamster/00-find-service-tn5fz to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:24 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-tn5fz job-controller
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:25 +0000 UTC Normal Pod 00-find-service-tn5fz AddedInterface Add eth0 [10.130.0.86/23] from ovn-kubernetes multus
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:25 +0000 UTC Normal Pod 00-find-service-tn5fz.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:26 +0000 UTC Normal Pod 00-find-service-tn5fz.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 884ms (884ms including waiting) kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:26 +0000 UTC Normal Pod 00-find-service-tn5fz.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:26 +0000 UTC Normal Pod 00-find-service-tn5fz.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:31 +0000 UTC Warning Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.85:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:31 +0000 UTC Warning Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.130.0.85:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:31 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.84:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:31 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.130.0.84:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:33 +0000 UTC Normal Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:33 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:33 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.84:8080/": read tcp 10.130.0.2:37432->10.130.0.84:8080: read: connection reset by peer kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:33 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.84:8080/": dial tcp 10.130.0.84:8080: connect: connection refused kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:34 +0000 UTC Warning Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.85:8080/": read tcp 10.130.0.2:59934->10.130.0.85:8080: read: connection reset by peer kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:34 +0000 UTC Warning Pod vertx-create-span-sidecar-7ff7788d48-ht5dz.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.85:8080/": dial tcp 10.130.0.85:8080: connect: connection refused kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:36 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:38 +0000 UTC Normal Pod agent-as-sidecar2-764dfd6b4-qp8wc Binding Scheduled Successfully assigned kuttl-test-whole-hamster/agent-as-sidecar2-764dfd6b4-qp8wc to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:38 +0000 UTC Normal Pod agent-as-sidecar2-764dfd6b4-qp8wc AddedInterface Add eth0 [10.130.0.87/23] from ovn-kubernetes multus
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:38 +0000 UTC Normal Pod agent-as-sidecar2-764dfd6b4-qp8wc.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:38 +0000 UTC Normal Pod agent-as-sidecar2-764dfd6b4-qp8wc.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:38 +0000 UTC Normal Pod agent-as-sidecar2-764dfd6b4-qp8wc.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:38 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-764dfd6b4 SuccessfulCreate Created pod: agent-as-sidecar2-764dfd6b4-qp8wc replicaset-controller
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:38 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-764dfd6b4 to 1 deployment-controller
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:40 +0000 UTC Normal Pod agent-as-sidecar-79694758c8-lkg5s.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:40 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd Binding Scheduled Successfully assigned kuttl-test-whole-hamster/vertx-create-span-sidecar-6856b5bb96-g8dmd to ip-10-0-52-194.us-west-2.compute.internal default-scheduler
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:40 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6856b5bb96 SuccessfulCreate Created pod: vertx-create-span-sidecar-6856b5bb96-g8dmd replicaset-controller
logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:40 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Killing Stopping container vertx-create-span-sidecar kubelet
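
NOTE: the burst of Unhealthy warnings above follows a known rhythm in this suite: the vertx demo app answers its HTTP probes slowly while the JVM warms up, so the kubelet first times out ("context deadline exceeded"), then restarts the container once the liveness probe fails; the test tolerates this because the kuttl asserts only require the pods to settle eventually. Below is a minimal sketch of a Deployment with the probe shape that would produce exactly these events; every value is illustrative, since the real manifest ships inside the jaeger-operator test suite and is not part of this log.

  # Hypothetical probe configuration; the real values live in the e2e manifests.
  kubectl apply -n kuttl-test-whole-hamster -f - <<'EOF'
  apiVersion: apps/v1
  kind: Deployment
  metadata:
    name: vertx-create-span-sidecar
  spec:
    replicas: 1
    selector:
      matchLabels:
        app: vertx-create-span-sidecar
    template:
      metadata:
        labels:
          app: vertx-create-span-sidecar
      spec:
        containers:
        - name: vertx-create-span-sidecar
          image: jaegertracing/vertx-create-span:operator-e2e-tests
          ports:
          - containerPort: 8080
          livenessProbe:        # failures here trigger the "Killing ... will be restarted" events
            httpGet:
              path: /
              port: 8080
            timeoutSeconds: 1   # a short timeout explains "context deadline exceeded"
          readinessProbe:       # failures here only mark the pod unready
            httpGet:
              path: /
              port: 8080
            timeoutSeconds: 1
  EOF
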
sidecar-namespace | 2024-09-19 02:12:40 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-bw44p replicaset-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:40 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:40 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6856b5bb96 to 1 from 0 deployment-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:41 +0000 UTC Normal Pod 01-find-service-96l5m Binding Scheduled Successfully assigned kuttl-test-whole-hamster/01-find-service-96l5m to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:41 +0000 UTC Normal Pod 01-find-service-96l5m AddedInterface Add eth0 [10.130.0.88/23] from ovn-kubernetes multus logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:41 +0000 UTC Normal Pod 01-find-service-96l5m.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:41 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-96l5m job-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:41 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd AddedInterface Add eth0 [10.129.0.32/23] from ovn-kubernetes multus logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:41 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Pulling Pulling image "jaegertracing/vertx-create-span:operator-e2e-tests" kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:41 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-bw44p.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.84:8080/": read tcp 10.130.0.2:42880->10.130.0.84:8080: read: connection reset by peer kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:42 +0000 UTC Normal Pod 01-find-service-96l5m.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 820ms (820ms including waiting) kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:42 +0000 UTC Normal Pod 01-find-service-96l5m.spec.containers{asserts-container} Created Created container asserts-container kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:42 +0000 UTC Normal Pod 01-find-service-96l5m.spec.containers{asserts-container} Started Started container asserts-container kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:46 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Pulled Successfully pulled image "jaegertracing/vertx-create-span:operator-e2e-tests" in 5.52s (5.52s including waiting) kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:47 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:47 +0000 UTC 
Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:47 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{jaeger-agent} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:51 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{jaeger-agent} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" in 4.869s (4.869s including waiting) kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:52 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:52 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:55 +0000 UTC Warning Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.0.32:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:55 +0000 UTC Warning Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.0.32:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:57 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:57 +0000 UTC Warning Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.0.32:8080/": read tcp 10.129.0.2:38646->10.129.0.32:8080: read: connection reset by peer kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:57 +0000 UTC Warning Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.0.32:8080/": dial tcp 10.129.0.32:8080: connect: connection refused kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:12:57 +0000 UTC Normal Pod vertx-create-span-sidecar-6856b5bb96-g8dmd.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:00 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:01 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7ff7788d48 SuccessfulDelete Deleted pod: vertx-create-span-sidecar-7ff7788d48-ht5dz replicaset-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:01 +0000 UTC Normal Pod vertx-create-span-sidecar-9dbf68b8d-hs42h 
Binding Scheduled Successfully assigned kuttl-test-whole-hamster/vertx-create-span-sidecar-9dbf68b8d-hs42h to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:01 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-9dbf68b8d SuccessfulCreate Created pod: vertx-create-span-sidecar-9dbf68b8d-hs42h replicaset-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:01 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-7ff7788d48 to 0 from 1 deployment-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:01 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-9dbf68b8d to 1 from 0 deployment-controller logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:02 +0000 UTC Normal Pod vertx-create-span-sidecar-9dbf68b8d-hs42h AddedInterface Add eth0 [10.130.0.89/23] from ovn-kubernetes multus logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:02 +0000 UTC Normal Pod vertx-create-span-sidecar-9dbf68b8d-hs42h.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:02 +0000 UTC Normal Pod vertx-create-span-sidecar-9dbf68b8d-hs42h.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet logger.go:42: 02:13:03 | sidecar-namespace | 2024-09-19 02:13:02 +0000 UTC Normal Pod vertx-create-span-sidecar-9dbf68b8d-hs42h.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet logger.go:42: 02:13:03 | sidecar-namespace | Deleting namespace: kuttl-test-whole-hamster === CONT kuttl/harness/sidecar-skip-webhook logger.go:42: 02:13:10 | sidecar-skip-webhook | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 02:13:10 | sidecar-skip-webhook | Creating namespace: kuttl-test-key-hookworm logger.go:42: 02:13:10 | sidecar-skip-webhook/0-install | starting test step 0-install logger.go:42: 02:13:10 | sidecar-skip-webhook/0-install | Jaeger:kuttl-test-key-hookworm/agent-as-sidecar created logger.go:42: 02:13:13 | sidecar-skip-webhook/0-install | test step completed 0-install logger.go:42: 02:13:13 | sidecar-skip-webhook/1-install | starting test step 1-install logger.go:42: 02:13:14 | sidecar-skip-webhook/1-install | Deployment:kuttl-test-key-hookworm/vertx-create-span-sidecar created logger.go:42: 02:13:16 | sidecar-skip-webhook/1-install | test step completed 1-install logger.go:42: 02:13:16 | sidecar-skip-webhook/2-add-anotation-and-label | starting test step 2-add-anotation-and-label logger.go:42: 02:13:16 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-key-hookworm] logger.go:42: 02:13:16 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar labeled logger.go:42: 02:13:16 | sidecar-skip-webhook/2-add-anotation-and-label | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-key-hookworm] logger.go:42: 02:13:17 | sidecar-skip-webhook/2-add-anotation-and-label | 
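
NOTE: the commands driven by steps 2 and 3 of this test distill the whole scenario: the app.kubernetes.io/name=jaeger-operator label makes the deployment look like the operator's own workload, so the injection webhook leaves it alone even though the inject annotation is set; removing the label re-enables injection (in the events that follow, the 84d458b68c pods carry no jaeger-agent container, while the 7b4b98566d pods created after the label is removed do). Gathered for reference:

  # Commands as run by the kuttl steps (copied from the log).
  kubectl label deployment vertx-create-span-sidecar \
    app.kubernetes.io/name=jaeger-operator --namespace kuttl-test-key-hookworm
  kubectl annotate --overwrite deployment vertx-create-span-sidecar \
    sidecar.jaegertracing.io/inject=true --namespace kuttl-test-key-hookworm
  # Step 3-remove-label: the trailing dash removes the label, and injection resumes.
  kubectl label deployment vertx-create-span-sidecar \
    app.kubernetes.io/name- --namespace kuttl-test-key-hookworm
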
logger.go:42: 02:13:17 | sidecar-skip-webhook/2-add-anotation-and-label | deployment.apps/vertx-create-span-sidecar annotated
logger.go:42: 02:13:17 | sidecar-skip-webhook/2-add-anotation-and-label | test step completed 2-add-anotation-and-label
logger.go:42: 02:13:17 | sidecar-skip-webhook/3-remove-label | starting test step 3-remove-label
logger.go:42: 02:13:17 | sidecar-skip-webhook/3-remove-label | running command: [kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name- --namespace kuttl-test-key-hookworm]
logger.go:42: 02:13:18 | sidecar-skip-webhook/3-remove-label | deployment.apps/vertx-create-span-sidecar unlabeled
logger.go:42: 02:13:19 | sidecar-skip-webhook/3-remove-label | test step completed 3-remove-label
logger.go:42: 02:13:19 | sidecar-skip-webhook | sidecar-skip-webhook events from ns kuttl-test-key-hookworm:
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:10 +0000 UTC Normal Pod agent-as-sidecar-8498f6f558-nplrt Binding Scheduled Successfully assigned kuttl-test-key-hookworm/agent-as-sidecar-8498f6f558-nplrt to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:10 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-8498f6f558 SuccessfulCreate Created pod: agent-as-sidecar-8498f6f558-nplrt replicaset-controller
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:10 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-8498f6f558 to 1 deployment-controller
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:11 +0000 UTC Normal Pod agent-as-sidecar-8498f6f558-nplrt AddedInterface Add eth0 [10.130.0.90/23] from ovn-kubernetes multus
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:11 +0000 UTC Normal Pod agent-as-sidecar-8498f6f558-nplrt.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:11 +0000 UTC Normal Pod agent-as-sidecar-8498f6f558-nplrt.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:11 +0000 UTC Normal Pod agent-as-sidecar-8498f6f558-nplrt.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:14 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t54dq Binding Scheduled Successfully assigned kuttl-test-key-hookworm/vertx-create-span-sidecar-84d458b68c-t54dq to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:14 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t54dq AddedInterface Add eth0 [10.130.0.91/23] from ovn-kubernetes multus
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:14 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t54dq.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:14 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t54dq.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:14 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-t54dq.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:14 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-t54dq replicaset-controller
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:14 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:17 +0000 UTC Normal Pod vertx-create-span-sidecar-7b4b98566d-6qfk6 Binding Scheduled Successfully assigned kuttl-test-key-hookworm/vertx-create-span-sidecar-7b4b98566d-6qfk6 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:17 +0000 UTC Normal Pod vertx-create-span-sidecar-7b4b98566d-6qfk6 AddedInterface Add eth0 [10.130.0.92/23] from ovn-kubernetes multus
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:17 +0000 UTC Normal Pod vertx-create-span-sidecar-7b4b98566d-6qfk6.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:17 +0000 UTC Normal Pod vertx-create-span-sidecar-7b4b98566d-6qfk6.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:17 +0000 UTC Normal Pod vertx-create-span-sidecar-7b4b98566d-6qfk6.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:17 +0000 UTC Normal Pod vertx-create-span-sidecar-7b4b98566d-6qfk6.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:17 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-7b4b98566d SuccessfulCreate Created pod: vertx-create-span-sidecar-7b4b98566d-6qfk6 replicaset-controller
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:17 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-7b4b98566d to 1 deployment-controller
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:18 +0000 UTC Normal Pod vertx-create-span-sidecar-7b4b98566d-6qfk6.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | 2024-09-19 02:13:18 +0000 UTC Normal Pod vertx-create-span-sidecar-7b4b98566d-6qfk6.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 02:13:19 | sidecar-skip-webhook | Deleting namespace: kuttl-test-key-hookworm
=== CONT kuttl/harness/sidecar-deployment
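
NOTE: each numbered kuttl step above corresponds to a file matched by the regexp in the "Ignoring README.md" line. A hypothetical reconstruction of the step that issued the label and annotate commands follows; the real step files live in the jaeger-operator repository and are not shown in this log. kuttl substitutes $NAMESPACE with the generated kuttl-test-* namespace:

  # Hypothetical kuttl step file, e.g. 2-add-anotation-and-label.yaml (name kept
  # as in the log); contents inferred from the "running command:" lines above.
  cat > 2-add-anotation-and-label.yaml <<'EOF'
  apiVersion: kuttl.dev/v1beta1
  kind: TestStep
  commands:
    - command: kubectl label deployment vertx-create-span-sidecar app.kubernetes.io/name=jaeger-operator --namespace $NAMESPACE
    - command: kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace $NAMESPACE
  EOF
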
logger.go:42: 02:13:25 | sidecar-deployment | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$
logger.go:42: 02:13:25 | sidecar-deployment | Creating namespace: kuttl-test-quick-coral
logger.go:42: 02:13:25 | sidecar-deployment/0-install | starting test step 0-install
logger.go:42: 02:13:26 | sidecar-deployment/0-install | Jaeger:kuttl-test-quick-coral/agent-as-sidecar created
logger.go:42: 02:13:29 | sidecar-deployment/0-install | test step completed 0-install
logger.go:42: 02:13:29 | sidecar-deployment/1-install | starting test step 1-install
logger.go:42: 02:13:29 | sidecar-deployment/1-install | Deployment:kuttl-test-quick-coral/vertx-create-span-sidecar created
logger.go:42: 02:13:31 | sidecar-deployment/1-install | test step completed 1-install
logger.go:42: 02:13:31 | sidecar-deployment/2-enable-injection | starting test step 2-enable-injection
logger.go:42: 02:13:31 | sidecar-deployment/2-enable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=true --namespace kuttl-test-quick-coral]
logger.go:42: 02:13:31 | sidecar-deployment/2-enable-injection | deployment.apps/vertx-create-span-sidecar annotated
logger.go:42: 02:13:32 | sidecar-deployment/2-enable-injection | test step completed 2-enable-injection
logger.go:42: 02:13:32 | sidecar-deployment/3-find-service | starting test step 3-find-service
logger.go:42: 02:13:33 | sidecar-deployment/3-find-service | Job:kuttl-test-quick-coral/00-find-service created
logger.go:42: 02:13:46 | sidecar-deployment/3-find-service | test step completed 3-find-service
logger.go:42: 02:13:46 | sidecar-deployment/4-other-instance | starting test step 4-other-instance
logger.go:42: 02:13:46 | sidecar-deployment/4-other-instance | Jaeger:kuttl-test-quick-coral/agent-as-sidecar2 created
logger.go:42: 02:13:49 | sidecar-deployment/4-other-instance | test step completed 4-other-instance
logger.go:42: 02:13:49 | sidecar-deployment/5-delete-first-instance | starting test step 5-delete-first-instance
logger.go:42: 02:13:50 | sidecar-deployment/5-delete-first-instance | test step completed 5-delete-first-instance
logger.go:42: 02:13:50 | sidecar-deployment/6-find-service | starting test step 6-find-service
logger.go:42: 02:13:50 | sidecar-deployment/6-find-service | Job:kuttl-test-quick-coral/01-find-service created
logger.go:42: 02:14:03 | sidecar-deployment/6-find-service | test step completed 6-find-service
logger.go:42: 02:14:03 | sidecar-deployment/7-disable-injection | starting test step 7-disable-injection
logger.go:42: 02:14:03 | sidecar-deployment/7-disable-injection | running command: [kubectl annotate --overwrite deployment vertx-create-span-sidecar sidecar.jaegertracing.io/inject=false --namespace kuttl-test-quick-coral]
logger.go:42: 02:14:03 | sidecar-deployment/7-disable-injection | deployment.apps/vertx-create-span-sidecar annotated
logger.go:42: 02:14:05 | sidecar-deployment/7-disable-injection | test step completed 7-disable-injection
logger.go:42: 02:14:05 | sidecar-deployment | sidecar-deployment events from ns kuttl-test-quick-coral:
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:26 +0000 UTC Normal Pod agent-as-sidecar-688787699f-9vqjm Binding Scheduled Successfully assigned kuttl-test-quick-coral/agent-as-sidecar-688787699f-9vqjm to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:26 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar-688787699f SuccessfulCreate Created pod: agent-as-sidecar-688787699f-9vqjm replicaset-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:26 +0000 UTC Normal Deployment.apps agent-as-sidecar ScalingReplicaSet Scaled up replica set agent-as-sidecar-688787699f to 1 deployment-controller
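
NOTE: the "Jaeger:kuttl-test-quick-coral/agent-as-sidecar created" line is kuttl applying a Jaeger custom resource. The actual step file is not in the log, so the following is only a plausible minimal shape for such a throwaway e2e instance; the single all-in-one pod in the events suggests the allInOne strategy with in-memory storage:

  # Hypothetical minimal Jaeger CR behind step 0-install.
  kubectl apply -n kuttl-test-quick-coral -f - <<'EOF'
  apiVersion: jaegertracing.io/v1
  kind: Jaeger
  metadata:
    name: agent-as-sidecar
  spec:
    strategy: allInOne     # one pod serving collector, query and UI
    storage:
      type: memory         # spans kept in RAM; enough for a smoke test
  EOF
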
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:27 +0000 UTC Normal Pod agent-as-sidecar-688787699f-9vqjm AddedInterface Add eth0 [10.130.0.93/23] from ovn-kubernetes multus
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:27 +0000 UTC Normal Pod agent-as-sidecar-688787699f-9vqjm.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:27 +0000 UTC Normal Pod agent-as-sidecar-688787699f-9vqjm.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:27 +0000 UTC Normal Pod agent-as-sidecar-688787699f-9vqjm.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:29 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-84d458b68c to 1 deployment-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:30 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8djj7 Binding Scheduled Successfully assigned kuttl-test-quick-coral/vertx-create-span-sidecar-84d458b68c-8djj7 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:30 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8djj7 AddedInterface Add eth0 [10.130.0.94/23] from ovn-kubernetes multus
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:30 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:30 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:30 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:30 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulCreate Created pod: vertx-create-span-sidecar-84d458b68c-8djj7 replicaset-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:31 +0000 UTC Normal Pod vertx-create-span-sidecar-d47c6864c-tmh4b Binding Scheduled Successfully assigned kuttl-test-quick-coral/vertx-create-span-sidecar-d47c6864c-tmh4b to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:31 +0000 UTC Normal Pod vertx-create-span-sidecar-d47c6864c-tmh4b AddedInterface Add eth0 [10.130.0.95/23] from ovn-kubernetes multus
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:31 +0000 UTC Normal Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:31 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-d47c6864c SuccessfulCreate Created pod: vertx-create-span-sidecar-d47c6864c-tmh4b replicaset-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:31 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-d47c6864c to 1 deployment-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:32 +0000 UTC Normal Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:32 +0000 UTC Normal Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:32 +0000 UTC Normal Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:32 +0000 UTC Normal Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:32 +0000 UTC Normal Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:33 +0000 UTC Normal Pod 00-find-service-xhhzt Binding Scheduled Successfully assigned kuttl-test-quick-coral/00-find-service-xhhzt to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:33 +0000 UTC Normal Pod 00-find-service-xhhzt AddedInterface Add eth0 [10.130.0.96/23] from ovn-kubernetes multus
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:33 +0000 UTC Normal Pod 00-find-service-xhhzt.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:33 +0000 UTC Normal Job.batch 00-find-service SuccessfulCreate Created pod: 00-find-service-xhhzt job-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:34 +0000 UTC Normal Pod 00-find-service-xhhzt.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 833ms (833ms including waiting) kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:34 +0000 UTC Normal Pod 00-find-service-xhhzt.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:34 +0000 UTC Normal Pod 00-find-service-xhhzt.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:38 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.94:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:38 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.130.0.94:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:40 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:40 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.94:8080/": read tcp 10.130.0.2:38040->10.130.0.94:8080: read: connection reset by peer kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:40 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.94:8080/": dial tcp 10.130.0.94:8080: connect: connection refused kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:40 +0000 UTC Warning Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.130.0.95:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:40 +0000 UTC Warning Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.95:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:43 +0000 UTC Normal Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:43 +0000 UTC Warning Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.95:8080/": read tcp 10.130.0.2:52470->10.130.0.95:8080: read: connection reset by peer kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:43 +0000 UTC Warning Pod vertx-create-span-sidecar-d47c6864c-tmh4b.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.95:8080/": dial tcp 10.130.0.95:8080: connect: connection refused kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:45 +0000 UTC Normal Job.batch 00-find-service Completed Job completed job-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:46 +0000 UTC Normal Pod agent-as-sidecar2-5b8864d487-9w78g Binding Scheduled Successfully assigned kuttl-test-quick-coral/agent-as-sidecar2-5b8864d487-9w78g to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:46 +0000 UTC Normal ReplicaSet.apps agent-as-sidecar2-5b8864d487 SuccessfulCreate Created pod: agent-as-sidecar2-5b8864d487-9w78g replicaset-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:46 +0000 UTC Normal Deployment.apps agent-as-sidecar2 ScalingReplicaSet Scaled up replica set agent-as-sidecar2-5b8864d487 to 1 deployment-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:47 +0000 UTC Normal Pod agent-as-sidecar2-5b8864d487-9w78g AddedInterface Add eth0 [10.130.0.97/23] from ovn-kubernetes multus
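
NOTE: the 00-find-service Job that completed above is the actual assertion of this step: the asserts-container polls the Jaeger query API until the instrumented service is reported. The internals of the jaeger-asserts image are not shown in this log, so the following is only a rough, assumption-laden equivalent; the service name and endpoint are both hypothetical (the <name>-query:443 shape matches the smoke-test endpoints later in this log):

  JAEGER_QUERY=https://agent-as-sidecar-query:443   # hypothetical query endpoint
  SERVICE=vertx-create-span-sidecar                 # hypothetical service name
  # /api/services is the Jaeger query endpoint returning {"data":["svc1",...]}
  until curl -ksf "$JAEGER_QUERY/api/services" | grep -q "\"$SERVICE\""; do
    echo "service $SERVICE not reported yet, retrying"
    sleep 5
  done
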
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:47 +0000 UTC Normal Pod agent-as-sidecar2-5b8864d487-9w78g.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:47 +0000 UTC Normal Pod agent-as-sidecar2-5b8864d487-9w78g.spec.containers{jaeger} Created Created container jaeger kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:47 +0000 UTC Normal Pod agent-as-sidecar2-5b8864d487-9w78g.spec.containers{jaeger} Started Started container jaeger kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Normal Pod 01-find-service-v5csk Binding Scheduled Successfully assigned kuttl-test-quick-coral/01-find-service-v5csk to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Normal Job.batch 01-find-service SuccessfulCreate Created pod: 01-find-service-v5csk job-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Normal Pod agent-as-sidecar-688787699f-9vqjm.spec.containers{jaeger} Killing Stopping container jaeger kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Normal Pod vertx-create-span-sidecar-5874bd5fc9-655hn Binding Scheduled Successfully assigned kuttl-test-quick-coral/vertx-create-span-sidecar-5874bd5fc9-655hn to ip-10-0-52-194.us-west-2.compute.internal default-scheduler
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-5874bd5fc9 SuccessfulCreate Created pod: vertx-create-span-sidecar-5874bd5fc9-655hn replicaset-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Normal Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Killing Stopping container vertx-create-span-sidecar kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.130.0.94:8080/": read tcp 10.130.0.2:37768->10.130.0.94:8080: read: connection reset by peer kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Warning Pod vertx-create-span-sidecar-84d458b68c-8djj7.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.130.0.94:8080/": read tcp 10.130.0.2:37784->10.130.0.94:8080: read: connection reset by peer kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-84d458b68c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-84d458b68c-8djj7 replicaset-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-84d458b68c to 0 from 1 deployment-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:50 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-5874bd5fc9 to 1 from 0 deployment-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod 01-find-service-v5csk AddedInterface Add eth0 [10.130.0.98/23] from ovn-kubernetes multus
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod 01-find-service-v5csk.spec.containers{asserts-container} Pulling Pulling image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod 01-find-service-v5csk.spec.containers{asserts-container} Pulled Successfully pulled image "quay.io/rhn_support_ikanse/jaeger-asserts:latest" in 824ms (824ms including waiting) kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod 01-find-service-v5csk.spec.containers{asserts-container} Created Created container asserts-container kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod vertx-create-span-sidecar-5874bd5fc9-655hn AddedInterface Add eth0 [10.129.0.33/23] from ovn-kubernetes multus
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:51 +0000 UTC Normal Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:13:52 +0000 UTC Normal Pod 01-find-service-v5csk.spec.containers{asserts-container} Started Started container asserts-container kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:00 +0000 UTC Warning Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.0.33:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:00 +0000 UTC Warning Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{vertx-create-span-sidecar} Unhealthy Liveness probe failed: Get "http://10.129.0.33:8080/": context deadline exceeded (Client.Timeout exceeded while awaiting headers) kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:02 +0000 UTC Normal Job.batch 01-find-service Completed Job completed job-controller
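
NOTE: step 7-disable-injection comes next: flipping the annotation to false makes the operator strip the agent from the pod template, and the deployment controller rolls a fresh ReplicaSet (in the events below, 6bc44db949 scales up while d47c6864c scales down). The annotate command is copied from the log; the rollout-status wait is an added illustration of how one would block until the new pods are ready:

  kubectl annotate --overwrite deployment vertx-create-span-sidecar \
    sidecar.jaegertracing.io/inject=false --namespace kuttl-test-quick-coral
  kubectl rollout status deployment/vertx-create-span-sidecar \
    --namespace kuttl-test-quick-coral --timeout=120s
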
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:02 +0000 UTC Normal Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{vertx-create-span-sidecar} Killing Container vertx-create-span-sidecar failed liveness probe, will be restarted kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:02 +0000 UTC Warning Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.0.33:8080/": read tcp 10.129.0.2:33594->10.129.0.33:8080: read: connection reset by peer kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:02 +0000 UTC Warning Pod vertx-create-span-sidecar-5874bd5fc9-655hn.spec.containers{vertx-create-span-sidecar} Unhealthy Readiness probe failed: Get "http://10.129.0.33:8080/": dial tcp 10.129.0.33:8080: connect: connection refused kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:03 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc44db949-jpv4t Binding Scheduled Successfully assigned kuttl-test-quick-coral/vertx-create-span-sidecar-6bc44db949-jpv4t to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:03 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-6bc44db949 SuccessfulCreate Created pod: vertx-create-span-sidecar-6bc44db949-jpv4t replicaset-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:03 +0000 UTC Normal ReplicaSet.apps vertx-create-span-sidecar-d47c6864c SuccessfulDelete Deleted pod: vertx-create-span-sidecar-d47c6864c-tmh4b replicaset-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:03 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled down replica set vertx-create-span-sidecar-d47c6864c to 0 from 1 deployment-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:03 +0000 UTC Normal Deployment.apps vertx-create-span-sidecar ScalingReplicaSet Scaled up replica set vertx-create-span-sidecar-6bc44db949 to 1 from 0 deployment-controller
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:04 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc44db949-jpv4t AddedInterface Add eth0 [10.130.0.99/23] from ovn-kubernetes multus
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:04 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc44db949-jpv4t.spec.containers{vertx-create-span-sidecar} Pulled Container image "jaegertracing/vertx-create-span:operator-e2e-tests" already present on machine kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:04 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc44db949-jpv4t.spec.containers{vertx-create-span-sidecar} Created Created container vertx-create-span-sidecar kubelet
logger.go:42: 02:14:05 | sidecar-deployment | 2024-09-19 02:14:04 +0000 UTC Normal Pod vertx-create-span-sidecar-6bc44db949-jpv4t.spec.containers{vertx-create-span-sidecar} Started Started container vertx-create-span-sidecar kubelet
logger.go:42: 02:14:05 | sidecar-deployment | Deleting namespace: kuttl-test-quick-coral
=== CONT kuttl
harness.go:405: run tests finished
harness.go:513: cleaning up
harness.go:570: removing temp folder: ""
--- PASS: kuttl (121.21s)
    --- PASS: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/artifacts (5.98s)
        --- PASS: kuttl/harness/sidecar-namespace (52.55s)
        --- PASS: kuttl/harness/sidecar-skip-webhook (15.81s)
        --- PASS: kuttl/harness/sidecar-deployment (46.28s)
PASS
+ exit_code=0
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name sidecar --report --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml
time="2024-09-19T02:14:12Z" level=debug msg="Setting a new name for the test suites"
time="2024-09-19T02:14:12Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-09-19T02:14:12Z" level=debug msg="normalizing test case names"
time="2024-09-19T02:14:12Z" level=debug msg="sidecar/artifacts -> sidecar_artifacts"
time="2024-09-19T02:14:12Z" level=debug msg="sidecar/sidecar-namespace -> sidecar_sidecar_namespace"
time="2024-09-19T02:14:12Z" level=debug msg="sidecar/sidecar-skip-webhook -> sidecar_sidecar_skip_webhook"
time="2024-09-19T02:14:12Z" level=debug msg="sidecar/sidecar-deployment -> sidecar_sidecar_deployment"
+------------------------------+--------+
|             NAME             | RESULT |
+------------------------------+--------+
| sidecar_artifacts            | passed |
| sidecar_sidecar_namespace    | passed |
| sidecar_sidecar_skip_webhook | passed |
| sidecar_sidecar_deployment   | passed |
+------------------------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
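
NOTE: with all four harness tests green, the run converts kuttl's ./artifacts/kuttl-report.xml into the JUnit file the CI system archives. The conversion just performed above boils down to these two commands (copied from the trace); junitcli renames the suites, drops the synthetic "artifacts" test case, and normalizes test-case names before writing the report:

  go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
  junitcli --suite-name sidecar --report \
    --output /logs/artifacts/sidecar.xml ./artifacts/kuttl-report.xml
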
make[1]: Entering directory '/tmp/jaeger-tests'
KAFKA_OLM=true ./hack/run-e2e-test-suite.sh streaming false true
+ '[' 3 -ne 3 ']'
+ test_suite_name=streaming
+ use_kind_cluster=false
+ jaeger_olm=true
+ timeout=5m
+ make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true
make[2]: Entering directory '/tmp/jaeger-tests'
make[2]: Nothing to be done for 'prepare-e2e-tests'.
make[2]: Leaving directory '/tmp/jaeger-tests'
+ '[' true = true ']'
+ echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true'
Skipping Jaeger Operator installation because JAEGER_OLM=true
+ root_dir=/tmp/jaeger-tests/hack/../
+ reports_dir=/logs/artifacts
+ mkdir -p /logs/artifacts
+ rm -f /logs/artifacts/streaming.xml
+ cd /tmp/jaeger-tests/hack/../
+ /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh
Installing kuttl
kubectl-kuttl 0.15.0 is installed already
+ make render-e2e-tests-streaming
make[2]: Entering directory '/tmp/jaeger-tests'
>>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true
KAFKA_VERSION=3.6.0 \
SKIP_KAFKA=false \
SKIP_ES_EXTERNAL=true \
./tests/e2e/streaming/render.sh
+++ kubectl get clusterversion
++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.16.11 True False 88m Cluster version is 4.16.11'
++ IS_OPENSHIFT=false
++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS
version 4.16.11 True False 88m Cluster version is 4.16.11' ']'
++ warning 'Generating templates for an OpenShift cluster'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
WAR: Generating templates for an OpenShift cluster
++ IS_OPENSHIFT=true
++ export KAFKA_USE_CUSTOM_PODSET
++ '[' -z 3.6.0 ']'
++ version_le 3.6.0 0.25.0
+++ echo 3.6.0 0.25.0
+++ tr ' ' '\n'
+++ sort -V
+++ head -n 1
++ test 0.25.0 == 3.6.0
++ KAFKA_USE_CUSTOM_PODSET=true
++ export IS_OPENSHIFT
+++ dirname ./tests/e2e/streaming/render.sh
++ export SUITE_DIR=./tests/e2e/streaming
++ SUITE_DIR=./tests/e2e/streaming
++ /tmp/jaeger-tests/hack/install/install-gomplate.sh
Installing Gomplate
gomplate 3.10.0 is installed already
++ /tmp/jaeger-tests/hack/install/install-yq.sh
Installing yq
yq 4.20.2 is installed already
++ /tmp/jaeger-tests/hack/install/install-kustomize.sh
Installing kustomize
kustomize 4.5.7 is installed already
++ export ELASTICSEARCH_NODECOUNT=1
++ ELASTICSEARCH_NODECOUNT=1
++ export ELASTICSEARCH_URL=http://elasticsearch
++ ELASTICSEARCH_URL=http://elasticsearch
++ export ELASTICSEARCH_PORT=:9200
++ ELASTICSEARCH_PORT=:9200
++ export CASSANDRA_SERVER=cassandra
++ CASSANDRA_SERVER=cassandra
++ export SERVICE_ACCOUNT_NAME=e2e-test
++ SERVICE_ACCOUNT_NAME=e2e-test
++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh
++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh
++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh
++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh
++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go
++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go
++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go
++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go
++ set -e
+++ pwd
++ cd /tmp/jaeger-tests/./tests/e2e/streaming
++ build_dir=_build
++ rm -rf _build
++ mkdir _build
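
NOTE: the expansions above come from two helpers in the test scripts: OpenShift detection (any output from kubectl get clusterversion means OpenShift, hence the WAR line even though the message says "for an OpenShift cluster"), and a version_le comparison built on sort -V. A reconstruction inferred from the trace; the real definitions live in the jaeger-tests hack scripts:

  # version_le A B: true when A <= B under version-number ordering.
  version_le() {
    test "$(echo "$1" "$2" | tr ' ' '\n' | sort -V | head -n 1)" == "$1"
  }

  output=$(kubectl get clusterversion 2>/dev/null)
  IS_OPENSHIFT=false
  if [ ! -z "$output" ]; then
    echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m'
    IS_OPENSHIFT=true
  fi
  # In the trace: version_le 3.6.0 0.25.0 evaluates "test 0.25.0 == 3.6.0",
  # which is false, so KAFKA_USE_CUSTOM_PODSET ends up true.
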
++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build
++ xargs -I '{}' cp -r '{}' _build
++ cd _build
++ info 'Rendering kuttl-test.yaml'
++ '[' 1 -ne 1 ']'
++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m'
Rendering kuttl-test.yaml
++ '[' true = true ']'
++ CRD_DIR=
++ export CRD_DIR
++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml
++ mkdir -p artifacts
+ is_secured=false
+ '[' true = true ']'
+ is_secured=true
+ '[' false = true ']'
+ start_test streaming-simple
+ '[' 1 -ne 1 ']'
+ test_name=streaming-simple
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test streaming-simple'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test streaming-simple\e[0m'
Rendering files for test streaming-simple
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build
+ '[' _build '!=' _build ']'
+ mkdir -p streaming-simple
+ cd streaming-simple
+ render_install_kafka my-cluster 00
+ '[' 2 -ne 2 ']'
+ cluster_name=my-cluster
+ test_step=00
+ CLUSTER_NAME=my-cluster
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml
+ render_assert_kafka false my-cluster 00
+ '[' 3 -ne 3 ']'
+ autoprovisioned=false
+ cluster_name=my-cluster
+ test_step=00
+ '[' false = true ']'
+ '[' false = true ']'
+ '[' false = false ']'
+ replicas=1
+ CLUSTER_NAME=my-cluster
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml
++ expr 00 + 1
+ CLUSTER_NAME=my-cluster
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./01-assert.yaml
++ expr 00 + 2
+ CLUSTER_NAME=my-cluster
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml
+ render_install_elasticsearch upstream 03
+ '[' 2 -ne 2 ']'
+ deploy_mode=upstream
+ test_step=03
+ '[' upstream = upstream ']'
+ '[' true = true ']'
+ template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template
+ /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml
+ /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml
+ JAEGER_NAME=simple-streaming
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/streaming-jaeger-assert.yaml.template -o ./04-assert.yaml
+ render_smoke_test simple-streaming true 05
+ '[' 3 -ne 3 ']'
+ jaeger=simple-streaming
+ is_secured=true
+ test_step=05
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443
+ JAEGER_QUERY_ENDPOINT=https://simple-streaming-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268
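
NOTE: every render_* helper above follows the same pattern: export the inputs as environment variables, then let gomplate expand a template into a numbered kuttl step file. A self-contained illustration of that pattern; the template content here is invented (the real templates live under tests/templates/), and gomplate reads the environment through its env.Getenv function:

  export JAEGER_NAME=simple-streaming
  cat > /tmp/demo.yaml.template <<'EOF'
  metadata:
    name: {{ env.Getenv "JAEGER_NAME" }}-smoke-test
  EOF
  gomplate -f /tmp/demo.yaml.template -o /tmp/demo.yaml
  cat /tmp/demo.yaml   # -> name: simple-streaming-smoke-test
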
+ JAEGER_COLLECTOR_ENDPOINT=http://simple-streaming-collector-headless:14268
+ export JAEGER_NAME=simple-streaming
+ JAEGER_NAME=simple-streaming
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml
+ unset JAEGER_NAME
+ unset JAEGER_QUERY_ENDPOINT
+ unset JAEGER_COLLECTOR_ENDPOINT
+ '[' false = true ']'
+ start_test streaming-with-tls
+ '[' 1 -ne 1 ']'
+ test_name=streaming-with-tls
+ echo ===========================================================================
===========================================================================
+ info 'Rendering files for test streaming-with-tls'
+ '[' 1 -ne 1 ']'
+ echo -e '\e[1;34mRendering files for test streaming-with-tls\e[0m'
Rendering files for test streaming-with-tls
+ echo ===========================================================================
===========================================================================
+++ pwd
++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-simple
+ '[' streaming-simple '!=' _build ']'
+ cd ..
+ mkdir -p streaming-with-tls
+ cd streaming-with-tls
+ render_install_kafka my-cluster 00
+ '[' 2 -ne 2 ']'
+ cluster_name=my-cluster
+ test_step=00
+ CLUSTER_NAME=my-cluster
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/kafka-install.yaml.template -o ./00-install.yaml
+ render_assert_kafka false my-cluster 00
+ '[' 3 -ne 3 ']'
+ autoprovisioned=false
+ cluster_name=my-cluster
+ test_step=00
+ '[' false = true ']'
+ '[' false = true ']'
+ '[' false = false ']'
+ replicas=1
+ CLUSTER_NAME=my-cluster
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./00-assert.yaml
++ expr 00 + 1
+ CLUSTER_NAME=my-cluster
+ REPLICAS=1
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./01-assert.yaml
++ expr 00 + 2
+ CLUSTER_NAME=my-cluster
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./02-assert.yaml
+ render_install_elasticsearch upstream 03
+ '[' 2 -ne 2 ']'
+ deploy_mode=upstream
+ test_step=03
+ '[' upstream = upstream ']'
+ '[' true = true ']'
+ template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template
+ /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml
+ /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./03-install.yaml
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./03-assert.yaml
+ render_smoke_test tls-streaming true 05
+ '[' 3 -ne 3 ']'
+ jaeger=tls-streaming
+ is_secured=true
+ test_step=05
+ '[' true = true ']'
+ protocol=https://
+ query_port=:443
+ template=/tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template
+ export JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443
+ JAEGER_QUERY_ENDPOINT=https://tls-streaming-query:443
+ export JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268
+ JAEGER_COLLECTOR_ENDPOINT=http://tls-streaming-collector-headless:14268
+ export JAEGER_NAME=tls-streaming
+ JAEGER_NAME=tls-streaming
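
NOTE: both streaming tests render Elasticsearch the same way, and the pair of yq calls is worth reading closely: the first splits the multi-document tests/elasticsearch.yml into elasticsearch_0.yml, elasticsearch_1.yml, ... ($index is the document index), and the second patches one of those files in place. This is mikefarah/yq v4 syntax, matching the "yq 4.20.2" install line earlier; the commands are copied from the trace:

  yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml
  yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml
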
+ /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/smoke-test.yaml.template -o ./05-smoke-test.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/smoke-test-assert.yaml.template -o ./05-assert.yaml + unset JAEGER_NAME + unset JAEGER_QUERY_ENDPOINT + unset JAEGER_COLLECTOR_ENDPOINT + '[' false = true ']' + start_test streaming-with-autoprovisioning-autoscale + '[' 1 -ne 1 ']' + test_name=streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== + info 'Rendering files for test streaming-with-autoprovisioning-autoscale' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test streaming-with-autoprovisioning-autoscale\e[0m' Rendering files for test streaming-with-autoprovisioning-autoscale + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-tls + '[' streaming-with-tls '!=' _build ']' + cd .. + mkdir -p streaming-with-autoprovisioning-autoscale + cd streaming-with-autoprovisioning-autoscale + '[' true = true ']' + rm ./00-install.yaml ./00-assert.yaml + render_install_elasticsearch upstream 01 + '[' 2 -ne 2 ']' + deploy_mode=upstream + test_step=01 + '[' upstream = upstream ']' + '[' true = true ']' + template=/tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template + /tmp/jaeger-tests/bin/yq eval -s '"elasticsearch_" + $index' /tmp/jaeger-tests/tests/elasticsearch.yml + /tmp/jaeger-tests/bin/yq eval -i '.spec.template.spec.serviceAccountName="deploy-elasticsearch"' ./elasticsearch_0.yml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/elasticsearch-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/elasticsearch-assert.yaml.template -o ./01-assert.yaml + jaeger_name=auto-provisioned + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="20Mi"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i '.spec.ingester.resources.requests.memory="500m"' ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.autoscale=true ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.minReplicas=1 ./02-install.yaml + /tmp/jaeger-tests/bin/yq e -i .spec.ingester.maxReplicas=2 ./02-install.yaml + render_assert_kafka true auto-provisioned 03 + '[' 3 -ne 3 ']' + autoprovisioned=true + cluster_name=auto-provisioned + test_step=03 + '[' true = true ']' + is_kafka_minimal_enabled + namespaces=(observability openshift-operators openshift-distributed-tracing) + for i in "${namespaces[@]}" ++ kubectl get pods -n observability -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-operators -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + enabled= + '[' '' == true ']' + for i in "${namespaces[@]}" ++ kubectl get pods -n openshift-distributed-tracing -l name=jaeger-operator -o yaml ++ /tmp/jaeger-tests/bin/yq e '.items[0].spec.containers[0].env[] | select(.name=="KAFKA-PROVISIONING-MINIMAL").value' + 
enabled=true + '[' true == true ']' + return 0 + replicas=1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-zookeeper-cluster.yaml.template -o ./03-assert.yaml ++ expr 03 + 1 + CLUSTER_NAME=auto-provisioned + REPLICAS=1 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-kafka-cluster.yaml.template -o ./04-assert.yaml ++ expr 03 + 2 + CLUSTER_NAME=auto-provisioned + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-entity-operator.yaml.template -o ./05-assert.yaml + version_lt 1.30 1.23 ++ echo 1.30 1.23 ++ tr ' ' '\n' ++ sort -rV ++ head -n 1 + test 1.30 '!=' 1.30 + rm ./08-assert.yaml + skip_test streaming-with-tls 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=streaming-with-tls + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build/streaming-with-autoprovisioning-autoscale + '[' streaming-with-autoprovisioning-autoscale '!=' _build ']' + cd .. + rm -rf streaming-with-tls + warning 'streaming-with-tls: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: streaming-with-tls: This test is flaky in Prow CI\e[0m' WAR: streaming-with-tls: This test is flaky in Prow CI + skip_test streaming-simple 'This test is flaky in Prow CI' + '[' 2 -ne 2 ']' + test_name=streaming-simple + message='This test is flaky in Prow CI' +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/streaming/_build + '[' _build '!=' _build ']' + rm -rf streaming-simple + warning 'streaming-simple: This test is flaky in Prow CI' + '[' 1 -ne 1 ']' + echo -e '\e[1;33mWAR: streaming-simple: This test is flaky in Prow CI\e[0m' WAR: streaming-simple: This test is flaky in Prow CI make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running streaming E2E tests' Running streaming E2E tests + cd tests/e2e/streaming/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1829818249 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-rosa-h-a4mj.qd6c.s3.devshift.org:443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 2 tests === RUN kuttl/harness === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/streaming-with-autoprovisioning-autoscale === PAUSE kuttl/harness/streaming-with-autoprovisioning-autoscale === CONT kuttl/harness/artifacts logger.go:42: 02:14:25 | artifacts | Creating namespace: kuttl-test-valued-bullfrog logger.go:42: 02:14:25 | artifacts | artifacts events from ns kuttl-test-valued-bullfrog: logger.go:42: 02:14:25 | artifacts | Deleting namespace: kuttl-test-valued-bullfrog === CONT kuttl/harness/streaming-with-autoprovisioning-autoscale logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale | Ignoring README.md as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_0.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale | Ignoring elasticsearch_1.yml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale | Creating namespace: kuttl-test-hopeful-chicken logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale/1-install | starting test step 1-install logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc create sa deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale/1-install | serviceaccount/deploy-elasticsearch created logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c oc adm policy add-scc-to-user privileged -z deploy-elasticsearch -n $NAMESPACE 2>&1 | grep -v "already exists" || true] logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale/1-install | clusterrole.rbac.authorization.k8s.io/system:openshift:scc:privileged added: "deploy-elasticsearch" logger.go:42: 02:14:32 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 6] logger.go:42: 02:14:38 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_0.yml -n $NAMESPACE] logger.go:42: 02:14:39 | streaming-with-autoprovisioning-autoscale/1-install | statefulset.apps/elasticsearch created logger.go:42: 02:14:39 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c sleep 3] logger.go:42: 02:14:42 | streaming-with-autoprovisioning-autoscale/1-install | running command: [sh -c kubectl apply -f elasticsearch_1.yml -n $NAMESPACE] logger.go:42: 02:14:43 | streaming-with-autoprovisioning-autoscale/1-install | service/elasticsearch created logger.go:42: 02:15:00 | streaming-with-autoprovisioning-autoscale/1-install | test step completed 1-install logger.go:42: 02:15:00 | streaming-with-autoprovisioning-autoscale/2-install | starting test step 2-install logger.go:42: 02:15:00 | streaming-with-autoprovisioning-autoscale/2-install | Jaeger:kuttl-test-hopeful-chicken/auto-provisioned created logger.go:42: 02:15:00 | streaming-with-autoprovisioning-autoscale/2-install | test step completed 2-install logger.go:42: 02:15:00 | streaming-with-autoprovisioning-autoscale/3- | starting test step 3- logger.go:42: 02:15:44 | streaming-with-autoprovisioning-autoscale/3- | test step completed 3- logger.go:42: 02:15:44 | streaming-with-autoprovisioning-autoscale/4- | starting test step 4- logger.go:42: 02:16:15 | 
streaming-with-autoprovisioning-autoscale/4- | test step completed 4- logger.go:42: 02:16:15 | streaming-with-autoprovisioning-autoscale/5- | starting test step 5- logger.go:42: 02:16:38 | streaming-with-autoprovisioning-autoscale/5- | test step completed 5- logger.go:42: 02:16:38 | streaming-with-autoprovisioning-autoscale/6- | starting test step 6- logger.go:42: 02:16:50 | streaming-with-autoprovisioning-autoscale/6- | test step completed 6- logger.go:42: 02:16:50 | streaming-with-autoprovisioning-autoscale/7- | starting test step 7- logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale/7- | test step completed 7- logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | streaming-with-autoprovisioning-autoscale events from ns kuttl-test-hopeful-chicken: logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:14:39 +0000 UTC Normal Pod elasticsearch-0 Binding Scheduled Successfully assigned kuttl-test-hopeful-chicken/elasticsearch-0 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:14:39 +0000 UTC Normal StatefulSet.apps elasticsearch SuccessfulCreate create Pod elasticsearch-0 in StatefulSet elasticsearch successful statefulset-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:14:40 +0000 UTC Normal Pod elasticsearch-0 AddedInterface Add eth0 [10.130.0.100/23] from ovn-kubernetes multus logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:14:40 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulling Pulling image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:14:49 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Pulled Successfully pulled image "docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.6" in 9.456s (9.456s including waiting) kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:14:49 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Created Created container elasticsearch kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:14:49 +0000 UTC Normal Pod elasticsearch-0.spec.containers{elasticsearch} Started Started container elasticsearch kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:14:55 +0000 UTC Warning Pod elasticsearch-0.spec.containers{elasticsearch} Unhealthy Readiness probe failed: Get "http://10.130.0.100:9200/": dial tcp 10.130.0.100:9200: connect: connection refused kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:04 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:05 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-zookeeper NoPods No matching pods found controllermanager logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:05 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-hopeful-chicken/data-auto-provisioned-zookeeper-0" 
ebs.csi.aws.com_aws-ebs-csi-driver-controller-7b76855fbd-rmdnk_9014896d-0999-4512-86fc-657b261d6e94 logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:05 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:09 +0000 UTC Normal PersistentVolumeClaim data-auto-provisioned-zookeeper-0 ProvisioningSucceeded Successfully provisioned volume pvc-a2b73141-9c90-4c52-b190-1a336f05d4b0 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7b76855fbd-rmdnk_9014896d-0999-4512-86fc-657b261d6e94 logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:10 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 Binding Scheduled Successfully assigned kuttl-test-hopeful-chicken/auto-provisioned-zookeeper-0 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:12 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-a2b73141-9c90-4c52-b190-1a336f05d4b0" attachdetach-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:13 +0000 UTC Normal Pod auto-provisioned-zookeeper-0 AddedInterface Add eth0 [10.130.0.101/23] from ovn-kubernetes multus logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:13 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulling Pulling image "registry.redhat.io/amq-streams/kafka-37-rhel9@sha256:42bf60ce31540dd61fab2c9886d791e41f063ea6f86628694b9e60e49bc8951b" kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:28 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Pulled Successfully pulled image "registry.redhat.io/amq-streams/kafka-37-rhel9@sha256:42bf60ce31540dd61fab2c9886d791e41f063ea6f86628694b9e60e49bc8951b" in 14.229s (14.229s including waiting) kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:28 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Created Created container zookeeper kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:28 +0000 UTC Normal Pod auto-provisioned-zookeeper-0.spec.containers{zookeeper} Started Started container zookeeper kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:46 +0000 UTC Normal PodDisruptionBudget.policy auto-provisioned-kafka NoPods No matching pods found controllermanager logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:46 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 WaitForFirstConsumer waiting for first consumer to be created before binding persistentvolume-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:46 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ExternalProvisioning Waiting for a volume to be created either by the external provisioner 'ebs.csi.aws.com' or manually by the system 
administrator. If volume creation is delayed, please verify that the provisioner is running and correctly registered. persistentvolume-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:46 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 Provisioning External provisioner is provisioning volume for claim "kuttl-test-hopeful-chicken/data-0-auto-provisioned-kafka-0" ebs.csi.aws.com_aws-ebs-csi-driver-controller-7b76855fbd-rmdnk_9014896d-0999-4512-86fc-657b261d6e94 logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:51 +0000 UTC Normal Pod auto-provisioned-kafka-0 Binding Scheduled Successfully assigned kuttl-test-hopeful-chicken/auto-provisioned-kafka-0 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:51 +0000 UTC Normal PersistentVolumeClaim data-0-auto-provisioned-kafka-0 ProvisioningSucceeded Successfully provisioned volume pvc-295ca044-5498-45fa-8ec0-25e29471ec72 ebs.csi.aws.com_aws-ebs-csi-driver-controller-7b76855fbd-rmdnk_9014896d-0999-4512-86fc-657b261d6e94 logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:54 +0000 UTC Normal Pod auto-provisioned-kafka-0 SuccessfulAttachVolume AttachVolume.Attach succeeded for volume "pvc-295ca044-5498-45fa-8ec0-25e29471ec72" attachdetach-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:55 +0000 UTC Normal Pod auto-provisioned-kafka-0 AddedInterface Add eth0 [10.130.0.102/23] from ovn-kubernetes multus logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:55 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Pulled Container image "registry.redhat.io/amq-streams/kafka-37-rhel9@sha256:42bf60ce31540dd61fab2c9886d791e41f063ea6f86628694b9e60e49bc8951b" already present on machine kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:55 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Created Created container kafka kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:15:55 +0000 UTC Normal Pod auto-provisioned-kafka-0.spec.containers{kafka} Started Started container kafka kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:17 +0000 UTC Normal Pod auto-provisioned-entity-operator-56b9fcffb5-d4htt Binding Scheduled Successfully assigned kuttl-test-hopeful-chicken/auto-provisioned-entity-operator-56b9fcffb5-d4htt to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:17 +0000 UTC Normal Pod auto-provisioned-entity-operator-56b9fcffb5-d4htt AddedInterface Add eth0 [10.130.0.103/23] from ovn-kubernetes multus logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:17 +0000 UTC Normal Pod auto-provisioned-entity-operator-56b9fcffb5-d4htt.spec.containers{topic-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel9-operator@sha256:95f5aa75cd1f7228e78fd4d88d786713fba4cf828dc22bc2dd1d0380909c1aef" already present on machine kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:17 +0000 UTC Normal Pod auto-provisioned-entity-operator-56b9fcffb5-d4htt.spec.containers{topic-operator} Created Created container 
topic-operator kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:17 +0000 UTC Normal Pod auto-provisioned-entity-operator-56b9fcffb5-d4htt.spec.containers{topic-operator} Started Started container topic-operator kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:17 +0000 UTC Normal Pod auto-provisioned-entity-operator-56b9fcffb5-d4htt.spec.containers{user-operator} Pulled Container image "registry.redhat.io/amq-streams/strimzi-rhel9-operator@sha256:95f5aa75cd1f7228e78fd4d88d786713fba4cf828dc22bc2dd1d0380909c1aef" already present on machine kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:17 +0000 UTC Normal ReplicaSet.apps auto-provisioned-entity-operator-56b9fcffb5 SuccessfulCreate Created pod: auto-provisioned-entity-operator-56b9fcffb5-d4htt replicaset-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:17 +0000 UTC Normal Deployment.apps auto-provisioned-entity-operator ScalingReplicaSet Scaled up replica set auto-provisioned-entity-operator-56b9fcffb5 to 1 deployment-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:18 +0000 UTC Normal Pod auto-provisioned-entity-operator-56b9fcffb5-d4htt.spec.containers{user-operator} Created Created container user-operator kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:18 +0000 UTC Normal Pod auto-provisioned-entity-operator-56b9fcffb5-d4htt.spec.containers{user-operator} Started Started container user-operator kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-collector-b7fdf8ccb-qlvrj Binding Scheduled Successfully assigned kuttl-test-hopeful-chicken/auto-provisioned-collector-b7fdf8ccb-qlvrj to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-collector-b7fdf8ccb-qlvrj AddedInterface Add eth0 [10.130.0.104/23] from ovn-kubernetes multus logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-collector-b7fdf8ccb-qlvrj.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" already present on machine kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-collector-b7fdf8ccb-qlvrj.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-collector-b7fdf8ccb-qlvrj.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal ReplicaSet.apps auto-provisioned-collector-b7fdf8ccb SuccessfulCreate Created pod: auto-provisioned-collector-b7fdf8ccb-qlvrj replicaset-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Deployment.apps auto-provisioned-collector ScalingReplicaSet Scaled up replica set auto-provisioned-collector-b7fdf8ccb to 1 deployment-controller 
logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-ingester-588bf485f5-9xwfs Binding Scheduled Successfully assigned kuttl-test-hopeful-chicken/auto-provisioned-ingester-588bf485f5-9xwfs to ip-10-0-52-194.us-west-2.compute.internal default-scheduler logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-ingester-588bf485f5-9xwfs AddedInterface Add eth0 [10.129.0.34/23] from ovn-kubernetes multus logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-ingester-588bf485f5-9xwfs.spec.containers{jaeger-ingester} Pulling Pulling image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:7f435686c86e24ca85e6ebfa2f3eb5a456a81a8c2d4df56e3f7b68fff46eaf85" kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal ReplicaSet.apps auto-provisioned-ingester-588bf485f5 SuccessfulCreate Created pod: auto-provisioned-ingester-588bf485f5-9xwfs replicaset-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Deployment.apps auto-provisioned-ingester ScalingReplicaSet Scaled up replica set auto-provisioned-ingester-588bf485f5 to 1 deployment-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c Binding Scheduled Successfully assigned kuttl-test-hopeful-chicken/auto-provisioned-query-767df9db98-jvv5c to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c AddedInterface Add eth0 [10.130.0.105/23] from ovn-kubernetes multus logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" already present on machine kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c.spec.containers{jaeger-query} Created Created container jaeger-query kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c.spec.containers{jaeger-query} Started Started container jaeger-query kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet 
logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Pod auto-provisioned-query-767df9db98-jvv5c.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal ReplicaSet.apps auto-provisioned-query-767df9db98 SuccessfulCreate Created pod: auto-provisioned-query-767df9db98-jvv5c replicaset-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:41 +0000 UTC Normal Deployment.apps auto-provisioned-query ScalingReplicaSet Scaled up replica set auto-provisioned-query-767df9db98 to 1 deployment-controller logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:47 +0000 UTC Normal Pod auto-provisioned-ingester-588bf485f5-9xwfs.spec.containers{jaeger-ingester} Pulled Successfully pulled image "registry.redhat.io/rhosdt/jaeger-ingester-rhel8@sha256:7f435686c86e24ca85e6ebfa2f3eb5a456a81a8c2d4df56e3f7b68fff46eaf85" in 6.116s (6.116s including waiting) kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:47 +0000 UTC Normal Pod auto-provisioned-ingester-588bf485f5-9xwfs.spec.containers{jaeger-ingester} Created Created container jaeger-ingester kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:47 +0000 UTC Normal Pod auto-provisioned-ingester-588bf485f5-9xwfs.spec.containers{jaeger-ingester} Started Started container jaeger-ingester kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | 2024-09-19 02:16:48 +0000 UTC Warning Pod auto-provisioned-ingester-588bf485f5-9xwfs.spec.containers{jaeger-ingester} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet logger.go:42: 02:16:51 | streaming-with-autoprovisioning-autoscale | Deleting namespace: kuttl-test-hopeful-chicken === CONT kuttl harness.go:405: run tests finished harness.go:513: cleaning up harness.go:570: removing temp folder: "" --- PASS: kuttl (186.15s) --- PASS: kuttl/harness (0.00s) --- PASS: kuttl/harness/artifacts (6.40s) --- PASS: kuttl/harness/streaming-with-autoprovisioning-autoscale (179.17s) PASS + exit_code=0 + set -e + go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6 + junitcli --suite-name streaming --report --output /logs/artifacts/streaming.xml ./artifacts/kuttl-report.xml time="2024-09-19T02:17:31Z" level=debug msg="Setting a new name for the test suites" time="2024-09-19T02:17:31Z" level=debug msg="Removing 'artifacts' TestCase" time="2024-09-19T02:17:31Z" level=debug msg="normalizing test case names" time="2024-09-19T02:17:31Z" level=debug msg="streaming/artifacts -> streaming_artifacts" time="2024-09-19T02:17:31Z" level=debug msg="streaming/streaming-with-autoprovisioning-autoscale -> streaming_streaming_with_autoprovisioning_autoscale" 
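The junitcli debug lines above show how the kuttl report is normalized before upload: the suites get a new name and '/' and '-' in test-case names are rewritten to '_'. The renaming visible in the 'streaming/... -> streaming_...' pairs is simple to reproduce; this one-liner is a sketch inferred from those pairs, not junitcli's actual implementation:

    # same rewrite as the debug output above: slashes and hyphens become underscores
    echo "streaming/streaming-with-autoprovisioning-autoscale" | sed 's#[/-]#_#g'
    # prints: streaming_streaming_with_autoprovisioning_autoscale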
+-----------------------------------------------------+--------+
|                        NAME                         | RESULT |
+-----------------------------------------------------+--------+
| streaming_artifacts                                 | passed |
| streaming_streaming_with_autoprovisioning_autoscale | passed |
+-----------------------------------------------------+--------+
+ '[' '' '!=' true ']' + '[' false == true ']' make[1]: Leaving directory '/tmp/jaeger-tests' make[1]: Entering directory '/tmp/jaeger-tests' KAFKA_OLM=true ./hack/run-e2e-test-suite.sh ui false true + '[' 3 -ne 3 ']' + test_suite_name=ui + use_kind_cluster=false + jaeger_olm=true + timeout=5m + make prepare-e2e-tests USE_KIND_CLUSTER=false JAEGER_OLM=true make[2]: Entering directory '/tmp/jaeger-tests' make[2]: Nothing to be done for 'prepare-e2e-tests'. make[2]: Leaving directory '/tmp/jaeger-tests' + '[' true = true ']' + echo 'Skipping Jaeger Operator installation because JAEGER_OLM=true' Skipping Jaeger Operator installation because JAEGER_OLM=true + root_dir=/tmp/jaeger-tests/hack/../ + reports_dir=/logs/artifacts + mkdir -p /logs/artifacts + rm -f /logs/artifacts/ui.xml + cd /tmp/jaeger-tests/hack/../ + /tmp/jaeger-tests/hack/..//hack/install/install-kuttl.sh Installing kuttl kubectl-kuttl 0.15.0 is installed already + make render-e2e-tests-ui make[2]: Entering directory '/tmp/jaeger-tests' >>>> Elasticsearch image not loaded because SKIP_ES_EXTERNAL is true SKIP_ES_EXTERNAL=true ./tests/e2e/ui/render.sh +++ kubectl get clusterversion ++ output='NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 92m Cluster version is 4.16.11' ++ IS_OPENSHIFT=false ++ '[' '!' -z 'NAME VERSION AVAILABLE PROGRESSING SINCE STATUS version 4.16.11 True False 92m Cluster version is 4.16.11' ']' ++ warning 'Generating templates for an OpenShift cluster' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;33mWAR: Generating templates for an OpenShift cluster\e[0m' WAR: Generating templates for an OpenShift cluster ++ IS_OPENSHIFT=true ++ export KAFKA_USE_CUSTOM_PODSET ++ '[' -z 3.6.0 ']' ++ version_le 3.6.0 0.25.0 +++ echo 3.6.0 0.25.0 +++ tr ' ' '\n' +++ sort -V +++ head -n 1 ++ test 0.25.0 == 3.6.0 ++ KAFKA_USE_CUSTOM_PODSET=true ++ export IS_OPENSHIFT +++ dirname ./tests/e2e/ui/render.sh ++ export SUITE_DIR=./tests/e2e/ui ++ SUITE_DIR=./tests/e2e/ui ++ /tmp/jaeger-tests/hack/install/install-gomplate.sh Installing Gomplate gomplate 3.10.0 is installed already ++ /tmp/jaeger-tests/hack/install/install-yq.sh Installing yq yq 4.20.2 is installed already ++ /tmp/jaeger-tests/hack/install/install-kustomize.sh Installing kustomize kustomize 4.5.7 is installed already ++ export ELASTICSEARCH_NODECOUNT=1 ++ ELASTICSEARCH_NODECOUNT=1 ++ export ELASTICSEARCH_URL=http://elasticsearch ++ ELASTICSEARCH_URL=http://elasticsearch ++ export ELASTICSEARCH_PORT=:9200 ++ ELASTICSEARCH_PORT=:9200 ++ export CASSANDRA_SERVER=cassandra ++ CASSANDRA_SERVER=cassandra ++ export SERVICE_ACCOUNT_NAME=e2e-test ++ SERVICE_ACCOUNT_NAME=e2e-test ++ PROGRAMS_FOLDER=../../../..
++ export ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ ASSERT_HTTP_CODE_PROGRAM=../../../../cmd-utils/assert-jaeger-http-code.sh ++ export CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ CHECK_JAEGER_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-version.sh ++ export CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ CHECK_JAEGER_OPERATOR_VERSION_PROGRAM=../../../../cmd-utils/check-jaeger-operator-version.sh ++ export GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ GET_TOKEN_PROGRAM=../../../../cmd-utils/get-token.sh ++ export TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ TEST_UI_CONFIG_PROGRAM=../../../../cmd-utils/uiconfig/main.go ++ export WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ WAIT_CRONJOB_PROGRAM=../../../../cmd-utils/wait-cronjob/main.go ++ export QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ QUERY_PROGRAM=../../../../assert-jobs/query/main.go ++ export REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ REPORTER_PROGRAM=../../../../assert-jobs/reporter/main.go ++ set -e +++ pwd ++ cd /tmp/jaeger-tests/./tests/e2e/ui ++ build_dir=_build ++ rm -rf _build ++ mkdir _build ++ find -maxdepth 1 -type d '!' -wholename . '!' -wholename ./_build ++ xargs -I '{}' cp -r '{}' _build ++ cd _build ++ info 'Rendering kuttl-test.yaml' ++ '[' 1 -ne 1 ']' ++ echo -e '\e[1;34mRendering kuttl-test.yaml\e[0m' Rendering kuttl-test.yaml ++ '[' true = true ']' ++ CRD_DIR= ++ export CRD_DIR ++ /tmp/jaeger-tests/bin/gomplate -f ../../../templates/kuttl-test.yaml.template -o ./kuttl-test.yaml ++ mkdir -p artifacts + start_test allinone + '[' 1 -ne 1 ']' + test_name=allinone + echo =========================================================================== =========================================================================== + info 'Rendering files for test allinone' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test allinone\e[0m' Rendering files for test allinone + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build + '[' _build '!=' _build ']' + mkdir -p allinone + cd allinone + export GET_URL_COMMAND + export URL + export JAEGER_NAME=all-in-one-ui + JAEGER_NAME=all-in-one-ui + '[' true = true ']' + GET_URL_COMMAND='kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE' + URL='https://$(kubectl get routes -o=jsonpath='\''{.items[0].status.ingress[0].host}'\'' -n $NAMESPACE)/search' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./01-curl.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./04-test-ui-config.yaml + start_test production + '[' 1 -ne 1 ']' + test_name=production + echo =========================================================================== =========================================================================== + info 'Rendering files for test production' + '[' 1 -ne 1 ']' + echo -e '\e[1;34mRendering files for test production\e[0m' Rendering files for 
test production + echo =========================================================================== =========================================================================== +++ pwd ++ basename /tmp/jaeger-tests/tests/e2e/ui/_build/allinone + '[' allinone '!=' _build ']' + cd .. + mkdir -p production + cd production + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + [[ true = true ]] + [[ true = true ]] + render_install_jaeger production-ui production_autoprovisioned 01 + '[' 3 -ne 3 ']' + export JAEGER_NAME=production-ui + JAEGER_NAME=production-ui + deploy_mode=production_autoprovisioned + test_step=01 + '[' production_autoprovisioned = allInOne ']' + '[' production_autoprovisioned = production ']' + '[' production_autoprovisioned = production_cassandra ']' + '[' production_autoprovisioned = production_autoprovisioned ']' + '[' true '!=' true ']' + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/openshift/production-jaeger-autoprovisioned-install.yaml.template -o ./01-install.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/production-jaeger-assert.yaml.template -o ./01-assert.yaml + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/ensure-ingress-host.sh.template -o ./ensure-ingress-host.sh + chmod +x ./ensure-ingress-host.sh + '[' true = true ']' + INSECURE=true + EXPECTED_CODE=403 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./02-check-forbbiden-access.yaml + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./03-curl.yaml + INSECURE=true + EXPECTED_CODE=200 + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/assert-http-code.yaml.template -o ./05-check-disabled-security.yaml + ASSERT_PRESENT=false + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./06-check-NO-gaID.yaml + ASSERT_PRESENT=true + TRACKING_ID=MyTrackingId + /tmp/jaeger-tests/bin/gomplate -f /tmp/jaeger-tests/tests/templates/test-ui-config.yaml.template -o ./08-check-gaID.yaml make[2]: Leaving directory '/tmp/jaeger-tests' + echo 'Running ui E2E tests' Running ui E2E tests + cd tests/e2e/ui/_build + set +e + KUBECONFIG=/tmp/kubeconfig-1829818249 + /tmp/jaeger-tests/hack/..//bin/kubectl-kuttl test --report xml === RUN kuttl harness.go:462: starting setup harness.go:252: running tests using configured kubeconfig. harness.go:275: Successful connection to cluster at: https://api.ci-rosa-h-a4mj.qd6c.s3.devshift.org:443 harness.go:360: running tests harness.go:73: going to run test suite with timeout of 600 seconds for each step harness.go:372: testsuite: . 
has 3 tests === RUN kuttl/harness === RUN kuttl/harness/allinone === PAUSE kuttl/harness/allinone === RUN kuttl/harness/artifacts === PAUSE kuttl/harness/artifacts === RUN kuttl/harness/production === PAUSE kuttl/harness/production === CONT kuttl/harness/allinone logger.go:42: 02:17:38 | allinone | Ignoring ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 02:17:38 | allinone | Creating namespace: kuttl-test-real-marten logger.go:42: 02:17:38 | allinone/0-install | starting test step 0-install logger.go:42: 02:17:39 | allinone/0-install | Jaeger:kuttl-test-real-marten/all-in-one-ui created logger.go:42: 02:17:40 | allinone/0-install | test step completed 0-install logger.go:42: 02:17:40 | allinone/1-curl | starting test step 1-curl logger.go:42: 02:17:40 | allinone/1-curl | running command: [./ensure-ingress-host.sh] logger.go:42: 02:17:40 | allinone/1-curl | Checking the Ingress host value was populated logger.go:42: 02:17:40 | allinone/1-curl | Try number 0 logger.go:42: 02:17:40 | allinone/1-curl | error: error executing jsonpath "{.items[0].status.ingress[0].host}": Error executing template: array index out of bounds: index 0, length 0. Printing more information for debugging the template: logger.go:42: 02:17:40 | allinone/1-curl | template was: logger.go:42: 02:17:40 | allinone/1-curl | {.items[0].status.ingress[0].host} logger.go:42: 02:17:40 | allinone/1-curl | object given to jsonpath engine was: logger.go:42: 02:17:40 | allinone/1-curl | map[string]interface {}{"apiVersion":"v1", "items":[]interface {}{}, "kind":"List", "metadata":map[string]interface {}{"resourceVersion":""}} logger.go:42: 02:17:40 | allinone/1-curl | logger.go:42: 02:17:40 | allinone/1-curl | logger.go:42: 02:17:50 | allinone/1-curl | Try number 1 logger.go:42: 02:17:51 | allinone/1-curl | Hostname is all-in-one-ui-kuttl-test-real-marten.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org logger.go:42: 02:17:51 | allinone/1-curl | running command: [sh -c ../../../../cmd-utils/assert-jaeger-http-code.sh https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search 200 true $NAMESPACE all-in-one-ui] logger.go:42: 02:17:51 | allinone/1-curl | Checking an expected HTTP response logger.go:42: 02:17:51 | allinone/1-curl | Running in OpenShift logger.go:42: 02:17:51 | allinone/1-curl | User not provided. Getting the token... logger.go:42: 02:17:52 | allinone/1-curl | Warning: resource jaegers/all-in-one-ui is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
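The 'Try number 0 ... Try number 1' sequence above is ensure-ingress-host.sh polling until the route's .status.ingress array is populated; the first attempt fails with the jsonpath out-of-bounds error because the route already exists but has no ingress entry yet. The script is rendered from ensure-ingress-host.sh.template, which is not shown in this log, so the loop below is only a sketch of its apparent behavior:

    echo "Checking the Ingress host value was populated"
    for try in $(seq 0 29); do
      echo "Try number $try"
      # the same jsonpath expression the suite uses; fails until .status.ingress[0] exists
      host=$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n "$NAMESPACE") \
        && [ -n "$host" ] && break
      sleep 10  # the log shows retries roughly 10 seconds apart
    done
    echo "Hostname is $host"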
logger.go:42: 02:18:00 | allinone/1-curl | Try number 1/30 the https://all-in-one-ui-kuttl-test-real-marten.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org/search logger.go:42: 02:18:00 | allinone/1-curl | curl response asserted properly logger.go:42: 02:18:00 | allinone/1-curl | test step completed 1-curl logger.go:42: 02:18:00 | allinone/2-delete | starting test step 2-delete logger.go:42: 02:18:01 | allinone/2-delete | Jaeger:kuttl-test-real-marten/all-in-one-ui created logger.go:42: 02:18:01 | allinone/2-delete | test step completed 2-delete logger.go:42: 02:18:01 | allinone/3-install | starting test step 3-install logger.go:42: 02:18:01 | allinone/3-install | Jaeger:kuttl-test-real-marten/all-in-one-ui updated logger.go:42: 02:18:01 | allinone/3-install | test step completed 3-install logger.go:42: 02:18:01 | allinone/4-test-ui-config | starting test step 4-test-ui-config logger.go:42: 02:18:01 | allinone/4-test-ui-config | running command: [./ensure-ingress-host.sh] logger.go:42: 02:18:01 | allinone/4-test-ui-config | Checking the Ingress host value was populated logger.go:42: 02:18:01 | allinone/4-test-ui-config | Try number 0 logger.go:42: 02:18:01 | allinone/4-test-ui-config | Hostname is all-in-one-ui-kuttl-test-real-marten.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org logger.go:42: 02:18:01 | allinone/4-test-ui-config | running command: [sh -c ASSERT_PRESENT=true EXPECTED_CONTENT=MyTrackingId QUERY_HOSTNAME=https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search go run ../../../../cmd-utils/uiconfig/main.go] logger.go:42: 02:18:02 | allinone/4-test-ui-config | time="2024-09-19T02:18:02Z" level=info msg="Querying https://all-in-one-ui-kuttl-test-real-marten.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org/search..." logger.go:42: 02:18:02 | allinone/4-test-ui-config | time="2024-09-19T02:18:02Z" level=info msg="No secret provided for the Authorization header" logger.go:42: 02:18:02 | allinone/4-test-ui-config | time="2024-09-19T02:18:02Z" level=info msg="Polling to https://all-in-one-ui-kuttl-test-real-marten.apps.rosa.ci-rosa-h-a4mj.qd6c.s3.devshift.org/search" logger.go:42: 02:18:02 | allinone/4-test-ui-config | time="2024-09-19T02:18:02Z" level=info msg="Doing request number 0" logger.go:42: 02:18:22 | allinone/4-test-ui-config | time="2024-09-19T02:18:22Z" level=warning msg="Status code: 503" logger.go:42: 02:18:22 | allinone/4-test-ui-config | time="2024-09-19T02:18:22Z" level=info msg="Doing request number 1" logger.go:42: 02:18:22 | allinone/4-test-ui-config | time="2024-09-19T02:18:22Z" level=info msg="Content found and asserted!" logger.go:42: 02:18:22 | allinone/4-test-ui-config | time="2024-09-19T02:18:22Z" level=info msg="Success!" 
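The 4-test-ui-config step runs cmd-utils/uiconfig/main.go, which polls the UI URL until the page body contains the expected tracking ID (ASSERT_PRESENT=true, EXPECTED_CONTENT=MyTrackingId); the 503 on request 0 is just the redeployed pod not being ready yet. The Go program's internals are not in this log, so this shell equivalent only approximates the behavior it prints:

    # poll the query UI until the expected content shows up in the page body
    EXPECTED_CONTENT=MyTrackingId
    QUERY_HOSTNAME="https://$(kubectl get routes -o=jsonpath='{.items[0].status.ingress[0].host}' -n $NAMESPACE)/search"
    for i in $(seq 0 9); do
      echo "Doing request number $i"
      if curl -sk "$QUERY_HOSTNAME" | grep -q "$EXPECTED_CONTENT"; then
        echo "Content found and asserted!"
        break
      fi
      sleep 5
    done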
logger.go:42: 02:18:23 | allinone/4-test-ui-config | test step completed 4-test-ui-config logger.go:42: 02:18:23 | allinone | allinone events from ns kuttl-test-real-marten: logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:39 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6 Binding Scheduled Successfully assigned kuttl-test-real-marten/all-in-one-ui-59bf599547-hf7r6 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:39 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-59bf599547 SuccessfulCreate Created pod: all-in-one-ui-59bf599547-hf7r6 replicaset-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:39 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-59bf599547 to 1 deployment-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:40 +0000 UTC Warning Pod all-in-one-ui-59bf599547-hf7r6 FailedMount MountVolume.SetUp failed for volume "all-in-one-ui-ui-oauth-proxy-tls" : secret "all-in-one-ui-ui-oauth-proxy-tls" not found kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:40 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6 AddedInterface Add eth0 [10.130.0.106/23] from ovn-kubernetes multus logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:40 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:40 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:40 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:40 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:41 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:41 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:53 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:53 +0000 UTC Normal Pod all-in-one-ui-59bf599547-hf7r6.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:53 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-59bf599547 SuccessfulDelete Deleted pod: all-in-one-ui-59bf599547-hf7r6 replicaset-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:53 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-59bf599547 to 0 from 1 deployment-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:54 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp Binding Scheduled Successfully assigned 
kuttl-test-real-marten/all-in-one-ui-76486bd4c6-mflqp to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:54 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-76486bd4c6 SuccessfulCreate Created pod: all-in-one-ui-76486bd4c6-mflqp replicaset-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:54 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-76486bd4c6 to 1 deployment-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:55 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp AddedInterface Add eth0 [10.130.0.107/23] from ovn-kubernetes multus logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:55 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:55 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:55 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:55 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:55 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:17:55 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:00 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:00 +0000 UTC Normal Pod all-in-one-ui-76486bd4c6-mflqp.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:01 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf Binding Scheduled Successfully assigned kuttl-test-real-marten/all-in-one-ui-64ddb84684-l4smf to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:01 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-64ddb84684 SuccessfulCreate Created pod: all-in-one-ui-64ddb84684-l4smf replicaset-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:01 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-64ddb84684 to 1 deployment-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:02 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf AddedInterface Add eth0 [10.130.0.108/23] from ovn-kubernetes multus logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:02 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 02:18:23 
| allinone | 2024-09-19 02:18:02 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:02 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:02 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:02 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:02 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:02 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-64ddb84684 SuccessfulDelete Deleted pod: all-in-one-ui-64ddb84684-l4smf replicaset-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:02 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled down replica set all-in-one-ui-64ddb84684 to 0 from 1 deployment-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:03 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf.spec.containers{jaeger} Killing Stopping container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:03 +0000 UTC Normal Pod all-in-one-ui-64ddb84684-l4smf.spec.containers{oauth-proxy} Killing Stopping container oauth-proxy kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:04 +0000 UTC Normal Pod all-in-one-ui-cb55b76b9-kzxlc Binding Scheduled Successfully assigned kuttl-test-real-marten/all-in-one-ui-cb55b76b9-kzxlc to ip-10-0-57-83.us-west-2.compute.internal default-scheduler logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:04 +0000 UTC Normal Pod all-in-one-ui-cb55b76b9-kzxlc AddedInterface Add eth0 [10.130.0.109/23] from ovn-kubernetes multus logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:04 +0000 UTC Normal Pod all-in-one-ui-cb55b76b9-kzxlc.spec.containers{jaeger} Pulled Container image "registry.redhat.io/rhosdt/jaeger-all-in-one-rhel8@sha256:1d8eef711323bbd14830846b3267011dd20cb1b15b84f16ce514e19c65531d34" already present on machine kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:04 +0000 UTC Normal ReplicaSet.apps all-in-one-ui-cb55b76b9 SuccessfulCreate Created pod: all-in-one-ui-cb55b76b9-kzxlc replicaset-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:04 +0000 UTC Normal Deployment.apps all-in-one-ui ScalingReplicaSet Scaled up replica set all-in-one-ui-cb55b76b9 to 1 deployment-controller logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:05 +0000 UTC Normal Pod all-in-one-ui-cb55b76b9-kzxlc.spec.containers{jaeger} Created Created container jaeger kubelet logger.go:42: 02:18:23 | allinone | 2024-09-19 02:18:05 +0000 UTC Normal Pod all-in-one-ui-cb55b76b9-kzxlc.spec.containers{jaeger} Started Started container jaeger kubelet logger.go:42: 02:18:23 | allinone | Deleting namespace: kuttl-test-real-marten === CONT kuttl/harness/production logger.go:42: 02:18:29 | production | Ignoring add-tracking-id.yaml as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 02:18:29 | production | Ignoring 
ensure-ingress-host.sh as it does not match file name regexp: ^(\d+)-(?:[^\.]+)(?:\.yaml)?$ logger.go:42: 02:18:29 | production | Creating namespace: kuttl-test-flying-kit logger.go:42: 02:18:29 | production/1-install | starting test step 1-install logger.go:42: 02:18:29 | production/1-install | Jaeger:kuttl-test-flying-kit/production-ui created logger.go:42: 02:28:30 | production/1-install | test step failed 1-install case.go:364: failed in step 1-install case.go:366: --- Deployment:kuttl-test-flying-kit/production-ui-collector +++ Deployment:kuttl-test-flying-kit/production-ui-collector @@ -1,10 +1,412 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: production-ui + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: production-ui-collector + app.kubernetes.io/part-of: jaeger + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"8d52adef-3083-4ca4-9423-522974b5bc3d"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-collector"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"COLLECTOR_OTLP_ENABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"COLLECTOR_ZIPKIN_HOST_PORT"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":4317,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":4318,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":9411,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14250,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14267,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14268,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":14269,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + 
f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/jaeger/sampling"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls-config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"production-ui-collector-tls-config-volume"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"production-ui-sampling-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"production-ui-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T02:18:33Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T02:18:33Z" name: production-ui-collector namespace: kuttl-test-flying-kit + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: production-ui + uid: 8d52adef-3083-4ca4-9423-522974b5bc3d spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: production-ui + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: production-ui-collector + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "14269" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: collector + app.kubernetes.io/instance: production-ui + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: production-ui-collector + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --collector.grpc.tls.cert=/etc/tls-config/tls.crt + - --collector.grpc.tls.enabled=true + - --collector.grpc.tls.key=/etc/tls-config/tls.key + - --sampling.strategies-file=/etc/jaeger/sampling/sampling.json + - --es.server-urls=https://elasticsearch.kuttl-test-flying-kit.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: 
elasticsearch + - name: COLLECTOR_ZIPKIN_HOST_PORT + value: :9411 + - name: COLLECTOR_OTLP_ENABLED + value: "true" + image: registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-collector + ports: + - containerPort: 9411 + name: zipkin + protocol: TCP + - containerPort: 14267 + name: c-tchan-trft + protocol: TCP + - containerPort: 14268 + name: c-binary-trft + protocol: TCP + - containerPort: 14269 + name: admin-http + protocol: TCP + - containerPort: 14250 + name: grpc + protocol: TCP + - containerPort: 4317 + name: grpc-otlp + protocol: TCP + - containerPort: 4318 + name: http-otlp + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14269 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/jaeger/sampling + name: production-ui-sampling-configuration-volume + readOnly: true + - mountPath: /etc/tls-config + name: production-ui-collector-tls-config-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: production-ui-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + dnsPolicy: ClusterFirst + enableServiceLinks: false + restartPolicy: Always + schedulerName: default-scheduler + securityContext: {} + serviceAccount: production-ui + serviceAccountName: production-ui + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + defaultMode: 420 + items: + - key: sampling + path: sampling.json + name: production-ui-sampling-configuration + name: production-ui-sampling-configuration-volume + - name: production-ui-collector-tls-config-volume + secret: + defaultMode: 420 + secretName: production-ui-collector-headless-tls + - configMap: + defaultMode: 420 + items: + - key: ca-bundle.crt + path: tls-ca-bundle.pem + name: production-ui-trusted-ca + name: production-ui-trusted-ca + - name: certs + secret: + defaultMode: 420 + secretName: production-ui-jaeger-elasticsearch status: - readyReplicas: 1 + conditions: + - lastTransitionTime: "2024-09-19T02:18:33Z" + lastUpdateTime: "2024-09-19T02:18:33Z" + message: Deployment does not have minimum availability. + reason: MinimumReplicasUnavailable + status: "False" + type: Available + - lastTransitionTime: "2024-09-19T02:18:33Z" + lastUpdateTime: "2024-09-19T02:18:33Z" + message: ReplicaSet "production-ui-collector-5cfd4b6dcd" is progressing. 
+ reason: ReplicaSetUpdated + status: "True" + type: Progressing + observedGeneration: 1 + replicas: 1 + unavailableReplicas: 1 + updatedReplicas: 1 case.go:366: resource Deployment:kuttl-test-flying-kit/production-ui-collector: .status.readyReplicas: key is missing from map case.go:366: --- Deployment:kuttl-test-flying-kit/production-ui-query +++ Deployment:kuttl-test-flying-kit/production-ui-query @@ -1,10 +1,508 @@ apiVersion: apps/v1 kind: Deployment metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.jaegertracing.io/inject: production-ui + sidecar.jaegertracing.io/revision: "1" + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: production-ui + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: production-ui-query + app.kubernetes.io/part-of: jaeger + sidecar.jaegertracing.io/injected: production-ui + managedFields: + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.jaegertracing.io/inject: {} + f:sidecar.jaegertracing.io/revision: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:ownerReferences: + .: {} + k:{"uid":"8d52adef-3083-4ca4-9423-522974b5bc3d"}: {} + f:spec: + f:progressDeadlineSeconds: {} + f:replicas: {} + f:revisionHistoryLimit: {} + f:selector: {} + f:strategy: + f:type: {} + f:template: + f:metadata: + f:annotations: + .: {} + f:linkerd.io/inject: {} + f:prometheus.io/port: {} + f:prometheus.io/scrape: {} + f:sidecar.istio.io/inject: {} + f:sidecar.jaegertracing.io/inject: {} + f:labels: + .: {} + f:app: {} + f:app.kubernetes.io/component: {} + f:app.kubernetes.io/instance: {} + f:app.kubernetes.io/managed-by: {} + f:app.kubernetes.io/name: {} + f:app.kubernetes.io/part-of: {} + f:spec: + f:containers: + k:{"name":"jaeger-query"}: + .: {} + f:args: {} + f:env: + .: {} + k:{"name":"JAEGER_DISABLED"}: + .: {} + f:name: {} + f:value: {} + k:{"name":"METRICS_STORAGE_TYPE"}: + .: {} + f:name: {} + k:{"name":"SPAN_STORAGE_TYPE"}: + .: {} + f:name: {} + f:value: {} + f:image: {} + f:imagePullPolicy: {} + f:livenessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":16685,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16686,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + k:{"containerPort":16687,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:readinessProbe: + .: {} + f:failureThreshold: {} + f:httpGet: + .: {} + f:path: {} + f:port: {} + f:scheme: {} + f:initialDelaySeconds: {} + f:periodSeconds: {} + f:successThreshold: {} + f:timeoutSeconds: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/certs"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/config"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + 
k:{"name":"oauth-proxy"}: + .: {} + f:args: {} + f:image: {} + f:imagePullPolicy: {} + f:name: {} + f:ports: + .: {} + k:{"containerPort":8443,"protocol":"TCP"}: + .: {} + f:containerPort: {} + f:name: {} + f:protocol: {} + f:resources: {} + f:terminationMessagePath: {} + f:terminationMessagePolicy: {} + f:volumeMounts: + .: {} + k:{"mountPath":"/etc/pki/ca-trust/extracted/pem"}: + .: {} + f:mountPath: {} + f:name: {} + f:readOnly: {} + k:{"mountPath":"/etc/tls/private"}: + .: {} + f:mountPath: {} + f:name: {} + f:dnsPolicy: {} + f:enableServiceLinks: {} + f:restartPolicy: {} + f:schedulerName: {} + f:securityContext: {} + f:serviceAccount: {} + f:serviceAccountName: {} + f:terminationGracePeriodSeconds: {} + f:volumes: + .: {} + k:{"name":"certs"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + k:{"name":"production-ui-trusted-ca"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"production-ui-ui-configuration-volume"}: + .: {} + f:configMap: + .: {} + f:defaultMode: {} + f:items: {} + f:name: {} + f:name: {} + k:{"name":"production-ui-ui-oauth-proxy-tls"}: + .: {} + f:name: {} + f:secret: + .: {} + f:defaultMode: {} + f:secretName: {} + manager: jaeger-operator + operation: Update + time: "2024-09-19T02:23:34Z" + - apiVersion: apps/v1 + fieldsType: FieldsV1 + fieldsV1: + f:metadata: + f:annotations: + f:deployment.kubernetes.io/revision: {} + f:status: + f:conditions: + .: {} + k:{"type":"Available"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + k:{"type":"Progressing"}: + .: {} + f:lastTransitionTime: {} + f:lastUpdateTime: {} + f:message: {} + f:reason: {} + f:status: {} + f:type: {} + f:observedGeneration: {} + f:replicas: {} + f:unavailableReplicas: {} + f:updatedReplicas: {} + manager: kube-controller-manager + operation: Update + subresource: status + time: "2024-09-19T02:23:34Z" name: production-ui-query namespace: kuttl-test-flying-kit + ownerReferences: + - apiVersion: jaegertracing.io/v1 + controller: true + kind: Jaeger + name: production-ui + uid: 8d52adef-3083-4ca4-9423-522974b5bc3d spec: + progressDeadlineSeconds: 600 replicas: 1 + revisionHistoryLimit: 10 + selector: + matchLabels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: production-ui + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: production-ui-query + app.kubernetes.io/part-of: jaeger + strategy: + type: Recreate + template: + metadata: + annotations: + linkerd.io/inject: disabled + prometheus.io/port: "16687" + prometheus.io/scrape: "true" + sidecar.istio.io/inject: "false" + sidecar.jaegertracing.io/inject: production-ui + creationTimestamp: null + labels: + app: jaeger + app.kubernetes.io/component: query + app.kubernetes.io/instance: production-ui + app.kubernetes.io/managed-by: jaeger-operator + app.kubernetes.io/name: production-ui-query + app.kubernetes.io/part-of: jaeger + spec: + containers: + - args: + - --query.ui-config=/etc/config/ui.json + - --es.server-urls=https://elasticsearch.kuttl-test-flying-kit.svc.cluster.local:9200 + - --es.tls.enabled=true + - --es.tls.ca=/certs/ca + - --es.tls.cert=/certs/cert + - --es.tls.key=/certs/key + - --es.timeout=15s + - --es.num-shards=1 + - --es.num-replicas=0 + env: + - name: SPAN_STORAGE_TYPE + value: elasticsearch + - name: METRICS_STORAGE_TYPE + - name: JAEGER_DISABLED + value: "false" + - name: JAEGER_SERVICE_NAME + value: 
production-ui.kuttl-test-flying-kit + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-query + ports: + - containerPort: 16685 + name: grpc-query + protocol: TCP + - containerPort: 16686 + name: query + protocol: TCP + - containerPort: 16687 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 16687 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 10 + successThreshold: 1 + timeoutSeconds: 1 + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/config + name: production-ui-ui-configuration-volume + readOnly: true + - mountPath: /etc/pki/ca-trust/extracted/pem + name: production-ui-trusted-ca + readOnly: true + - mountPath: /certs + name: certs + readOnly: true + - args: + - --cookie-secret=TByTkKtx4v1RVMvjxqsgQe + - --https-address=:8443 + - '--openshift-sar={"namespace": "kuttl-test-flying-kit", "resource": "pods", + "verb": "get"}' + - --openshift-service-account=production-ui-ui-proxy + - --provider=openshift + - --tls-cert=/etc/tls/private/tls.crt + - --tls-key=/etc/tls/private/tls.key + - --upstream=http://localhost:16686 + env: + - name: JAEGER_SERVICE_NAME + value: production-ui.kuttl-test-flying-kit + - name: JAEGER_PROPAGATION + value: jaeger,b3,w3c + image: registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508 + imagePullPolicy: IfNotPresent + name: oauth-proxy + ports: + - containerPort: 8443 + name: public + protocol: TCP + resources: {} + terminationMessagePath: /dev/termination-log + terminationMessagePolicy: File + volumeMounts: + - mountPath: /etc/tls/private + name: production-ui-ui-oauth-proxy-tls + - mountPath: /etc/pki/ca-trust/extracted/pem + name: production-ui-trusted-ca + readOnly: true + - args: + - --agent.tags=cluster=undefined,deployment.name=production-ui-query,host.ip=${HOST_IP:},pod.name=${POD_NAME:},pod.namespace=kuttl-test-flying-kit + - --reporter.grpc.host-port=dns:///production-ui-collector-headless.kuttl-test-flying-kit.svc:14250 + - --reporter.grpc.tls.ca=/etc/pki/ca-trust/source/service-ca/service-ca.crt + - --reporter.grpc.tls.enabled=true + env: + - name: POD_NAME + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.name + - name: HOST_IP + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: status.hostIP + image: registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147 + imagePullPolicy: IfNotPresent + livenessProbe: + failureThreshold: 5 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 5 + periodSeconds: 15 + successThreshold: 1 + timeoutSeconds: 1 + name: jaeger-agent + ports: + - containerPort: 5775 + name: zk-compact-trft + protocol: UDP + - containerPort: 5778 + name: config-rest + protocol: TCP + - containerPort: 6831 + name: jg-compact-trft + protocol: UDP + - containerPort: 6832 + name: jg-binary-trft + protocol: UDP + - containerPort: 14271 + name: admin-http + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: / + port: 14271 + scheme: HTTP + initialDelaySeconds: 1 + periodSeconds: 
10
+         successThreshold: 1
+         timeoutSeconds: 1
+       resources: {}
+       terminationMessagePath: /dev/termination-log
+       terminationMessagePolicy: File
+       volumeMounts:
+       - mountPath: /etc/pki/ca-trust/extracted/pem
+         name: production-ui-trusted-ca
+         readOnly: true
+       - mountPath: /etc/pki/ca-trust/source/service-ca
+         name: production-ui-service-ca
+         readOnly: true
+     dnsPolicy: ClusterFirst
+     enableServiceLinks: false
+     restartPolicy: Always
+     schedulerName: default-scheduler
+     securityContext: {}
+     serviceAccount: production-ui-ui-proxy
+     serviceAccountName: production-ui-ui-proxy
+     terminationGracePeriodSeconds: 30
+     volumes:
+     - configMap:
+         defaultMode: 420
+         items:
+         - key: ui
+           path: ui.json
+         name: production-ui-ui-configuration
+       name: production-ui-ui-configuration-volume
+     - configMap:
+         defaultMode: 420
+         items:
+         - key: ca-bundle.crt
+           path: tls-ca-bundle.pem
+         name: production-ui-trusted-ca
+       name: production-ui-trusted-ca
+     - name: production-ui-ui-oauth-proxy-tls
+       secret:
+         defaultMode: 420
+         secretName: production-ui-ui-oauth-proxy-tls
+     - name: certs
+       secret:
+         defaultMode: 420
+         secretName: production-ui-jaeger-elasticsearch
+     - configMap:
+         defaultMode: 420
+         items:
+         - key: service-ca.crt
+           path: service-ca.crt
+         name: production-ui-service-ca
+       name: production-ui-service-ca
 status:
-  readyReplicas: 1
+  conditions:
+  - lastTransitionTime: "2024-09-19T02:18:33Z"
+    lastUpdateTime: "2024-09-19T02:18:33Z"
+    message: Deployment does not have minimum availability.
+    reason: MinimumReplicasUnavailable
+    status: "False"
+    type: Available
+  - lastTransitionTime: "2024-09-19T02:18:33Z"
+    lastUpdateTime: "2024-09-19T02:18:33Z"
+    message: ReplicaSet "production-ui-query-c68dfc44c" is progressing.
+    reason: ReplicaSetUpdated
+    status: "True"
+    type: Progressing
+  observedGeneration: 3
+  replicas: 1
+  unavailableReplicas: 1
+  updatedReplicas: 1
case.go:366: resource Deployment:kuttl-test-flying-kit/production-ui-query: .status.readyReplicas: key is missing from map
logger.go:42: 02:28:30 | production | production events from ns kuttl-test-flying-kit:
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:33 +0000 UTC Normal Pod production-ui-collector-5cfd4b6dcd-klm9c Binding Scheduled Successfully assigned kuttl-test-flying-kit/production-ui-collector-5cfd4b6dcd-klm9c to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:33 +0000 UTC Normal ReplicaSet.apps production-ui-collector-5cfd4b6dcd SuccessfulCreate Created pod: production-ui-collector-5cfd4b6dcd-klm9c replicaset-controller
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:33 +0000 UTC Normal Deployment.apps production-ui-collector ScalingReplicaSet Scaled up replica set production-ui-collector-5cfd4b6dcd to 1 deployment-controller
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:33 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62 Binding Scheduled Successfully assigned kuttl-test-flying-kit/production-ui-query-c68dfc44c-z8f62 to ip-10-0-57-83.us-west-2.compute.internal default-scheduler
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:33 +0000 UTC Normal ReplicaSet.apps production-ui-query-c68dfc44c SuccessfulCreate Created pod: production-ui-query-c68dfc44c-z8f62 replicaset-controller
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:33 +0000 UTC Normal Deployment.apps production-ui-query ScalingReplicaSet Scaled up replica set production-ui-query-c68dfc44c to 1 deployment-controller
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-collector-5cfd4b6dcd-klm9c AddedInterface Add eth0 [10.130.0.110/23] from ovn-kubernetes multus
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-collector-5cfd4b6dcd-klm9c.spec.containers{jaeger-collector} Pulled Container image "registry.redhat.io/rhosdt/jaeger-collector-rhel8@sha256:4bab63290ff52e4f6328009f9c8b2c4496b230e9f8a98eac01736a66a291ff6c" already present on machine kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-collector-5cfd4b6dcd-klm9c.spec.containers{jaeger-collector} Created Created container jaeger-collector kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-collector-5cfd4b6dcd-klm9c.spec.containers{jaeger-collector} Started Started container jaeger-collector kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62 AddedInterface Add eth0 [10.130.0.111/23] from ovn-kubernetes multus
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62.spec.containers{jaeger-query} Pulled Container image "registry.redhat.io/rhosdt/jaeger-query-rhel8@sha256:a9186dcd910256c0f464b0a3928844a01de166a10c186c97ef4581bf288c23cb" already present on machine kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62.spec.containers{jaeger-query} Created Created container jaeger-query kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62.spec.containers{jaeger-query} Started Started container jaeger-query kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62.spec.containers{oauth-proxy} Pulled Container image "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:5b2ca060dff5d64979497bd8b7144f8e115763984f2ae7d1ddf6ef416d636508" already present on machine kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62.spec.containers{oauth-proxy} Created Created container oauth-proxy kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62.spec.containers{oauth-proxy} Started Started container oauth-proxy kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62.spec.containers{jaeger-agent} Pulled Container image "registry.redhat.io/rhosdt/jaeger-agent-rhel8@sha256:69d728dd27fbd47fc667704adfa76746392f1f2331a927e5c436965d651ae147" already present on machine kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62.spec.containers{jaeger-agent} Created Created container jaeger-agent kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:34 +0000 UTC Normal Pod production-ui-query-c68dfc44c-z8f62.spec.containers{jaeger-agent} Started Started container jaeger-agent kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:35 +0000 UTC Warning Pod production-ui-collector-5cfd4b6dcd-klm9c.spec.containers{jaeger-collector} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:36 +0000 UTC Warning Pod production-ui-query-c68dfc44c-z8f62.spec.containers{jaeger-query} Unhealthy Readiness probe failed: HTTP probe failed with statuscode: 503 kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:46 +0000 UTC Warning Pod production-ui-collector-5cfd4b6dcd-klm9c.spec.containers{jaeger-collector} BackOff Back-off restarting failed container jaeger-collector in pod production-ui-collector-5cfd4b6dcd-klm9c_kuttl-test-flying-kit(bec4dd55-4918-4bb6-930f-6a43d35717f5) kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:46 +0000 UTC Warning Pod production-ui-query-c68dfc44c-z8f62.spec.containers{jaeger-query} BackOff Back-off restarting failed container jaeger-query in pod production-ui-query-c68dfc44c-z8f62_kuttl-test-flying-kit(dc7923d1-d2cb-445b-838f-a340f5620017) kubelet
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedGetResourceMetric failed to get memory utilization: unable to get metrics for resource memory: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 02:28:30 | production | 2024-09-19 02:18:49 +0000 UTC Warning HorizontalPodAutoscaler.autoscaling production-ui-collector FailedComputeMetricsReplicas invalid metrics (2 invalid out of 2), first error is: failed to get cpu resource metric value: failed to get cpu utilization: unable to get metrics for resource cpu: no metrics returned from resource metrics API horizontal-pod-autoscaler
logger.go:42: 02:28:31 | production | Deleting namespace: kuttl-test-flying-kit
=== CONT  kuttl/harness/artifacts
logger.go:42: 02:28:37 | artifacts | Creating namespace: kuttl-test-awake-seasnail
logger.go:42: 02:28:37 | artifacts | artifacts events from ns kuttl-test-awake-seasnail:
logger.go:42: 02:28:37 | artifacts | Deleting namespace: kuttl-test-awake-seasnail
=== CONT  kuttl
    harness.go:405: run tests finished
    harness.go:513: cleaning up
    harness.go:570: removing temp folder: ""
--- FAIL: kuttl (664.93s)
    --- FAIL: kuttl/harness (0.00s)
        --- PASS: kuttl/harness/allinone (50.54s)
        --- FAIL: kuttl/harness/production (607.80s)
        --- PASS: kuttl/harness/artifacts (6.00s)
FAIL
+ exit_code=1
+ set -e
+ go install github.com/RH-QE-Distributed-Tracing/junitcli/cmd/junitcli@v1.0.6
+ junitcli --suite-name ui --report --output /logs/artifacts/ui.xml ./artifacts/kuttl-report.xml
time="2024-09-19T02:28:43Z" level=debug msg="Setting a new name for the test suites"
time="2024-09-19T02:28:43Z" level=debug msg="Removing 'artifacts' TestCase"
time="2024-09-19T02:28:43Z" level=debug msg="normalizing test case names"
time="2024-09-19T02:28:43Z" level=debug msg="ui/allinone -> ui_allinone"
time="2024-09-19T02:28:43Z" level=debug msg="ui/production -> ui_production"
time="2024-09-19T02:28:43Z" level=debug msg="ui/artifacts -> ui_artifacts"
+---------------+--------+
|     NAME      | RESULT |
+---------------+--------+
| ui_allinone   | passed |
| ui_production | failed |
| ui_artifacts  | passed |
+---------------+--------+
+ '[' '' '!=' true ']'
+ '[' false == true ']'
make[1]: Leaving directory '/tmp/jaeger-tests'
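Note on the failure above: the two "key is missing from map" errors are the proximate cause of the ui_production result. The kuttl assert for step 1-install expects both Deployments to report status.readyReplicas: 1, but the jaeger-collector and jaeger-query containers crash-looped (see the Unhealthy readiness-probe and BackOff events), so the field was never populated between the step start at 02:18:29 and the failure at 02:28:30. A minimal sketch of an assert of that shape, assuming the usual numbered kuttl step layout (the file name and exact contents under tests/e2e/ui are assumptions, not taken from this log):

    # Hypothetical 1-assert.yaml: kuttl re-reads the named objects in the test
    # namespace until every listed field matches, or fails the step on timeout.
    apiVersion: apps/v1
    kind: Deployment
    metadata:
      name: production-ui-collector
    status:
      readyReplicas: 1
    ---
    apiVersion: apps/v1
    kind: Deployment
    metadata:
      name: production-ui-query
    status:
      readyReplicas: 1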