ansible-playbook [core 2.16.14]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-uHD
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.1 (main, Feb 21 2024, 14:18:26) [GCC 8.5.0 20210514 (Red Hat 8.5.0-21)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
statically imported: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml
statically imported: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml
statically imported: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/check_candlepin.yml
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_environments.yml ***********************************************
1 plays in /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml

PLAY [Basic repository enablement/disablement test] ****************************

TASK [Get LSR_RHC_TEST_DATA environment variable] ******************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:3
Saturday 08 November 2025 14:20:32 -0500 (0:00:00.032) 0:00:00.032 *****
ok: [managed-node2] => {"ansible_facts": {"lsr_rhc_test_data_file": ""}, "changed": false}

TASK [Import test data] ********************************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:12
Saturday 08 November 2025 14:20:32 -0500 (0:00:00.047) 0:00:00.079 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "lsr_rhc_test_data_file | length > 0", "skip_reason": "Conditional result was False"}

TASK [Get facts for external test data] ****************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:16
Saturday 08 November 2025 14:20:32 -0500 (0:00:00.064) 0:00:00.143 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "lsr_rhc_test_data_file | length > 0", "skip_reason": "Conditional result was False"}
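These first tasks implement an override hook: a data file named by the LSR_RHC_TEST_DATA environment variable on the controller can replace the bundled Candlepin test data, and both import tasks are gated on that variable being non-empty. A minimal sketch of the pattern, assuming the variable is read with the env lookup (task names are from the log; the exact wiring is not shown there):

    # Sketch only: read the override path on the controller, load it when set.
    - name: Get LSR_RHC_TEST_DATA environment variable
      ansible.builtin.set_fact:
        lsr_rhc_test_data_file: "{{ lookup('env', 'LSR_RHC_TEST_DATA') }}"

    - name: Import test data
      ansible.builtin.include_vars:
        file: "{{ lsr_rhc_test_data_file }}"
        name: lsr_rhc_test_data
      when: lsr_rhc_test_data_file | length > 0

Since the variable is empty in this run, the bundled candlepin_data.yml is loaded instead, as the next task shows.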
"proxy_auth_scheme": "https", "proxy_auth_username": "proxyuser", "proxy_noauth_hostname": "localhost", "proxy_noauth_port": 3128, "proxy_noauth_scheme": "https", "proxy_nonworking_hostname": "wrongproxy", "proxy_nonworking_password": "wrong-proxypassword", "proxy_nonworking_port": 4000, "proxy_nonworking_username": "wrong-proxyuser", "reg_activation_keys": [ "default_key" ], "reg_invalid_password": "invalid-password", "reg_invalid_username": "invalid-user", "reg_organization": "donaldduck", "reg_password": "password", "reg_username": "doc", "release": null, "repositories": [ { "name": "donaldy-content-label-7051", "state": "enabled" }, { "name": "content-label-32060", "state": "disabled" } ] } }, "ansible_included_var_files": [ "/tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/../files/candlepin_data.yml" ], "changed": false } TASK [Check if system is ostree] *********************************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:32 Saturday 08 November 2025 14:20:32 -0500 (0:00:00.038) 0:00:00.223 ***** ok: [managed-node2] => { "ansible_facts": { "discovered_interpreter_python": "/usr/libexec/platform-python" }, "changed": false, "stat": { "exists": false } } TASK [Set flag to indicate system is ostree] *********************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:37 Saturday 08 November 2025 14:20:33 -0500 (0:00:00.655) 0:00:00.879 ***** ok: [managed-node2] => { "ansible_facts": { "__rhc_is_ostree": false }, "changed": false } TASK [Get facts for external test data] **************************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:9 Saturday 08 November 2025 14:20:33 -0500 (0:00:00.018) 0:00:00.897 ***** ok: [managed-node2] TASK [Set helper fact for Candlepin base URL] ********************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:17 Saturday 08 November 2025 14:20:33 -0500 (0:00:00.668) 0:00:01.566 ***** ok: [managed-node2] => { "ansible_facts": { "_cp_url": "https://candlepin.local:8443/candlepin" }, "changed": false } TASK [Set helper fact for Candlepin owner URL] ********************************* task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:21 Saturday 08 November 2025 14:20:33 -0500 (0:00:00.039) 0:00:01.605 ***** ok: [managed-node2] => { "ansible_facts": { "_cp_url_owner": "https://candlepin.local:8443/candlepin/owners/donaldduck" }, "changed": false } TASK [Add candlepin hostname to /etc/hosts] ************************************ task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:25 Saturday 08 November 2025 14:20:33 -0500 (0:00:00.031) 0:00:01.637 ***** changed: [managed-node2] => { "backup": "", "changed": true } MSG: line added TASK [Install needed packages] ************************************************* task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:31 Saturday 08 November 2025 14:20:34 -0500 (0:00:00.443) 0:00:02.081 ***** changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: shadow-utils-subid-2:4.6-22.el8.x86_64", "Installed: 
TASK [Add candlepin hostname to /etc/hosts] ************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:25
Saturday 08 November 2025 14:20:33 -0500 (0:00:00.031) 0:00:01.637 *****
changed: [managed-node2] => {"backup": "", "changed": true}
MSG:
line added

TASK [Install needed packages] *************************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:31
Saturday 08 November 2025 14:20:34 -0500 (0:00:00.443) 0:00:02.081 *****
changed: [managed-node2] => {"changed": true, "rc": 0, "results": [
    "Installed: shadow-utils-subid-2:4.6-22.el8.x86_64",
    "Installed: container-selinux-2:2.229.0-2.module_el8+847+7863d4e6.noarch",
    "Installed: criu-3.18-4.module_el8+804+f131391c.x86_64",
    "Installed: dnsmasq-2.79-33.el8.x86_64",
    "Installed: libslirp-4.4.0-1.module_el8+804+f131391c.x86_64",
    "Installed: protobuf-c-1.3.0-8.el8.x86_64",
    "Installed: slirp4netns-1.2.3-1.module_el8+951+32019cde.x86_64",
    "Installed: podman-catatonit-3:4.9.4-0.1.module_el8+971+3d3df00d.x86_64",
    "Installed: fuse3-libs-3.3.0-19.el8.x86_64",
    "Installed: fuse3-3.3.0-19.el8.x86_64",
    "Installed: containers-common-2:1-81.module_el8+968+fbb249c7.x86_64",
    "Installed: podman-gvproxy-3:4.9.4-0.1.module_el8+971+3d3df00d.x86_64",
    "Installed: podman-plugins-3:4.9.4-0.1.module_el8+971+3d3df00d.x86_64",
    "Installed: fuse-overlayfs-1.13-1.module_el8+804+f131391c.x86_64",
    "Installed: fuse-common-3.3.0-19.el8.x86_64",
    "Installed: podman-3:4.9.4-0.1.module_el8+971+3d3df00d.x86_64",
    "Installed: libnet-1.1.6-15.el8.x86_64",
    "Installed: runc-1:1.1.12-1.module_el8+885+7da147f3.x86_64",
    "Installed: containernetworking-plugins-1:1.4.0-2.module_el8+974+0c52b299.x86_64",
    "Installed: conmon-3:2.1.10-1.module_el8+804+f131391c.x86_64"
]}
lsrpackages: podman

TASK [Clean up Candlepin container] ********************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:38
Saturday 08 November 2025 14:21:36 -0500 (0:01:01.849) 0:01:03.930 *****
included: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml for managed-node2

TASK [Check if the candlepin container exists] *********************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml:6
Saturday 08 November 2025 14:21:36 -0500 (0:00:00.036) 0:01:03.967 *****
ok: [managed-node2] => {"changed": false, "cmd": ["podman", "ps", "-a", "--filter", "name=candlepin"], "delta": "0:00:01.570694", "end": "2025-11-08 14:21:38.089573", "rc": 0, "start": "2025-11-08 14:21:36.518879"}
STDOUT:
CONTAINER ID  IMAGE       COMMAND     CREATED     STATUS      PORTS       NAMES

TASK [Ensure that Candlepin container doesn't exist] ***************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml:17
Saturday 08 November 2025 14:21:38 -0500 (0:00:02.018) 0:01:05.985 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "\"candlepin\" in __rhc_candlepin_cont_exists.stdout", "skip_reason": "Conditional result was False"}
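The teardown include is idempotent: it lists containers matching the name and removes one only when the name actually appears in the output. A sketch of that check-then-remove pattern; the skip condition is verbatim from the log, and the removal command is assumed from the later teardown run, where "podman stop candlepin" does this job:

    - name: Check if the candlepin container exists
      ansible.builtin.command:
        argv: [podman, ps, -a, --filter, name=candlepin]
      register: __rhc_candlepin_cont_exists
      changed_when: false

    - name: Ensure that Candlepin container doesn't exist
      ansible.builtin.command:
        argv: [podman, stop, candlepin]   # assumption: stop of a --rm container removes it
      when: '"candlepin" in __rhc_candlepin_cont_exists.stdout'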
TASK [Start Candlepin container] ***********************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:41
Saturday 08 November 2025 14:21:38 -0500 (0:00:00.030) 0:01:06.016 *****
ok: [managed-node2] => {"changed": false, "cmd": ["podman", "run", "--rm", "--detach", "--hostname", "candlepin.local", "--name", "candlepin", "--publish", "8443:8443", "--publish", "8080:8080", "ghcr.io/candlepin/candlepin-unofficial"], "delta": "0:00:15.764384", "end": "2025-11-08 14:21:54.251378", "rc": 0, "start": "2025-11-08 14:21:38.486994"}
STDOUT:
22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa
STDERR:
Trying to pull ghcr.io/candlepin/candlepin-unofficial:latest...
Getting image source signatures
Copying blob sha256:868e32b0b96932a9d44af4fddb5291921afffb37e16e8c9bc0382ef20f02e4a0
Copying config sha256:fc49ff13f7f3d9b39189a4dadc708bc5cf2aea44997b748d698128d169c494b8
Writing manifest to image destination

TASK [Ensure directories exist] ************************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:62
Saturday 08 November 2025 14:21:54 -0500 (0:00:16.168) 0:01:22.185 *****
changed: [managed-node2] => (item=/etc/pki/product) => {"ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/etc/pki/product", "mode": "0755", "owner": "root", "path": "/etc/pki/product", "secontext": "unconfined_u:object_r:cert_t:s0", "size": 6, "state": "directory", "uid": 0}
changed: [managed-node2] => (item=/etc/pki/product-default) => {"ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/etc/pki/product-default", "mode": "0755", "owner": "root", "path": "/etc/pki/product-default", "secontext": "unconfined_u:object_r:cert_t:s0", "size": 6, "state": "directory", "uid": 0}
changed: [managed-node2] => (item=/etc/rhsm/ca) => {"ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/etc/rhsm/ca", "mode": "0755", "owner": "root", "path": "/etc/rhsm/ca", "secontext": "unconfined_u:object_r:rhsmcertd_config_t:s0", "size": 6, "state": "directory", "uid": 0}

TASK [Copy product certificates] ***********************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:72
Saturday 08 November 2025 14:21:58 -0500 (0:00:04.214) 0:01:26.399 *****
ok: [managed-node2] => (item=7050) => {"ansible_loop_var": "item", "changed": false, "cmd": ["podman", "cp", "candlepin:/home/candlepin/devel/candlepin/generated_certs/7050.pem", "/etc/pki/product-default/"], "delta": "0:00:00.670299", "end": "2025-11-08 14:22:00.797237", "item": "7050", "rc": 0, "start": "2025-11-08 14:22:00.126938"}

TASK [Copy Candlepin CA certificate for subscription-manager] ******************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:83
Saturday 08 November 2025 14:22:01 -0500 (0:00:02.551) 0:01:28.951 *****
ok: [managed-node2] => {"changed": false, "cmd": ["podman", "cp", "candlepin:/etc/candlepin/certs/candlepin-ca.crt", "/etc/rhsm/ca/candlepin-ca.pem"], "delta": "0:00:00.400865", "end": "2025-11-08 14:22:03.368640", "rc": 0, "start": "2025-11-08 14:22:02.967775"}

TASK [Copy Candlepin CA certificate for system] ********************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:92
Saturday 08 November 2025 14:22:03 -0500 (0:00:02.473) 0:01:31.425 *****
ok: [managed-node2] => {"changed": false, "cmd": ["podman", "cp", "candlepin:/etc/candlepin/certs/candlepin-ca.crt", "/etc/pki/ca-trust/source/anchors/candlepin-ca.pem"], "delta": "0:00:00.359752", "end": "2025-11-08 14:22:05.196336", "rc": 0, "start": "2025-11-08 14:22:04.836584"}

TASK [Update system certificates store] ****************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:101
Saturday 08 November 2025 14:22:05 -0500 (0:00:01.799) 0:01:33.224 *****
ok: [managed-node2] => {"changed": false, "cmd": ["update-ca-trust", "extract"], "delta": "0:00:01.887729", "end": "2025-11-08 14:22:08.542892", "rc": 0, "start": "2025-11-08 14:22:06.655163"}
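The certificate plumbing is all done with podman cp out of the running container, followed by a refresh of the host trust store. A condensed sketch of that sequence (paths and argv taken verbatim from the log; the tasks report "ok", so the test presumably sets changed_when accordingly, which is an assumption here):

    - name: Copy Candlepin CA certificate for system
      ansible.builtin.command:
        argv:
          - podman
          - cp
          - candlepin:/etc/candlepin/certs/candlepin-ca.crt
          - /etc/pki/ca-trust/source/anchors/candlepin-ca.pem
      changed_when: false   # assumption: matches the "ok" status in the log

    - name: Update system certificates store
      ansible.builtin.command:
        argv: [update-ca-trust, extract]
      changed_when: false   # assumption, as above

This is what lets the later HTTPS checks against candlepin.local:8443 pass certificate validation.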
"0:00:01.887729", "end": "2025-11-08 14:22:08.542892", "rc": 0, "start": "2025-11-08 14:22:06.655163" } TASK [Wait for started Candlepin] ********************************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:108 Saturday 08 November 2025 14:22:08 -0500 (0:00:03.454) 0:01:36.679 ***** ok: [managed-node2] => { "attempts": 1, "changed": false, "connection": "close", "content_type": "application/json", "cookies": {}, "cookies_string": "", "date": "Sat, 08 Nov 2025 19:22:21 GMT", "elapsed": 11, "redirected": true, "status": 200, "transfer_encoding": "chunked", "url": "https://candlepin.local:8443/candlepin/", "vary": "accept-encoding", "x_candlepin_request_uuid": "1a4ffaf5-7461-4223-9057-0fef599cd6a2", "x_version": "4.7.1-1" } MSG: OK (unknown bytes) TASK [Install GPG key for RPM repositories] ************************************ task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:118 Saturday 08 November 2025 14:22:21 -0500 (0:00:12.827) 0:01:49.506 ***** changed: [managed-node2] => { "changed": true, "checksum_dest": null, "checksum_src": "5bd09883847285c54e6064f29dd9686c1afa5d72", "dest": "/etc/pki/rpm-gpg/RPM-GPG-KEY-candlepin", "elapsed": 0, "gid": 0, "group": "root", "md5sum": "aadf73f83655a28e287fab4099f1e17a", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:cert_t:s0", "size": 1660, "src": "/root/.ansible/tmp/ansible-tmp-1762629741.7135143-10077-30968526963278/tmpf0cs1bcl", "state": "file", "status_code": 200, "uid": 0, "url": "http://candlepin.local:8080/RPM-GPG-KEY-candlepin" } MSG: OK (1660 bytes) TASK [Add environments] ******************************************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:127 Saturday 08 November 2025 14:22:22 -0500 (0:00:00.644) 0:01:50.151 ***** ok: [managed-node2] => (item={'name': 'Environment 1', 'desc': 'The environment 1', 'id': 'envId1'}) => { "ansible_loop_var": "item", "changed": false, "connection": "close", "content_type": "application/json", "cookies": {}, "cookies_string": "", "date": "Sat, 08 Nov 2025 19:22:23 GMT", "elapsed": 0, "item": { "desc": "The environment 1", "id": "envId1", "name": "Environment 1" }, "json": { "contentPrefix": null, "created": "2025-11-08T19:22:23+0000", "description": "The environment 1", "environmentContent": [], "id": "envId1", "name": "Environment 1", "owner": { "anonymous": false, "contentAccessMode": "org_environment", "displayName": "Donald Duck", "href": "/owners/donaldduck", "id": "8a8082e4997b71a401997b71b9520004", "key": "donaldduck" }, "type": null, "updated": "2025-11-08T19:22:23+0000" }, "redirected": false, "status": 200, "transfer_encoding": "chunked", "url": "https://candlepin.local:8443/candlepin/owners/donaldduck/environments", "vary": "accept-encoding", "x_candlepin_request_uuid": "c3c70ccf-12a6-4bc0-8c6c-60f144e00af2", "x_version": "4.7.1-1" } MSG: OK (unknown bytes) ok: [managed-node2] => (item={'name': 'Environment 2', 'desc': 'The environment 2', 'id': 'envId2'}) => { "ansible_loop_var": "item", "changed": false, "connection": "close", "content_type": "application/json", "cookies": {}, "cookies_string": "", "date": "Sat, 08 Nov 2025 19:22:23 GMT", "elapsed": 0, "item": { "desc": "The environment 2", "id": "envId2", "name": "Environment 2" }, "json": { "contentPrefix": null, "created": "2025-11-08T19:22:23+0000", "description": "The 
environment 2", "environmentContent": [], "id": "envId2", "name": "Environment 2", "owner": { "anonymous": false, "contentAccessMode": "org_environment", "displayName": "Donald Duck", "href": "/owners/donaldduck", "id": "8a8082e4997b71a401997b71b9520004", "key": "donaldduck" }, "type": null, "updated": "2025-11-08T19:22:23+0000" }, "redirected": false, "status": 200, "transfer_encoding": "chunked", "url": "https://candlepin.local:8443/candlepin/owners/donaldduck/environments", "vary": "accept-encoding", "x_candlepin_request_uuid": "d1604b20-9aac-4315-918e-2826abbbfa10", "x_version": "4.7.1-1" } MSG: OK (unknown bytes) TASK [Check Candlepin works] *************************************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/check_candlepin.yml:3 Saturday 08 November 2025 14:22:24 -0500 (0:00:01.766) 0:01:51.917 ***** ok: [managed-node2] => { "changed": false, "connection": "close", "content_type": "application/json", "cookies": {}, "cookies_string": "", "date": "Sat, 08 Nov 2025 19:22:24 GMT", "elapsed": 0, "redirected": true, "status": 200, "transfer_encoding": "chunked", "url": "https://candlepin.local:8443/candlepin/", "vary": "accept-encoding", "x_candlepin_request_uuid": "e6df1ca1-6f31-420c-931d-e48ec6be6ae1", "x_version": "4.7.1-1" } MSG: OK (unknown bytes) TASK [Skip if no test environments are set] ************************************ task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:17 Saturday 08 November 2025 14:22:24 -0500 (0:00:00.529) 0:01:52.447 ***** META: end_play conditional evaluated to False, continuing play skipping: [managed-node2] => { "skip_reason": "end_play conditional evaluated to False, continuing play" } MSG: end_play TASK [Ensure ansible_facts used by the test] *********************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:24 Saturday 08 November 2025 14:22:24 -0500 (0:00:00.024) 0:01:52.472 ***** ok: [managed-node2] TASK [Register (wrong environment)] ******************************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:35 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.460) 0:01:52.932 ***** TASK [fedora.linux_system_roles.rhc : Set ansible_facts required by role] ****** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:3 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.019) 0:01:52.952 ***** included: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.rhc : Ensure ansible_facts used by role] ******* task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:3 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.015) 0:01:52.967 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__rhc_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if system is ostree] *************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:11 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.013) 0:01:52.981 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __rhc_is_ostree 
is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Set flag to indicate system is ostree] *** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:16 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.011) 0:01:52.992 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __rhc_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if insights-packages are installed] *** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:20 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.011) 0:01:53.003 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_distribution == \"RedHat\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle insights unregistration] ********** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:6 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.015) 0:01:53.019 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "ansible_distribution == \"RedHat\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Handle system subscription] ************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:15 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.011) 0:01:53.030 ***** [WARNING]: Collection community.general does not support Ansible version 2.16.14 included: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml for managed-node2 TASK [fedora.linux_system_roles.rhc : Ensure required packages are installed] *** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 Saturday 08 November 2025 14:22:25 -0500 (0:00:00.030) 0:01:53.060 ***** changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: dnf-plugin-subscription-manager-1.28.42-1.el8.x86_64", "Installed: subscription-manager-rhsm-certificates-20220623-1.el8.noarch", "Installed: subscription-manager-1.28.42-1.el8.x86_64", "Installed: python3-iniparse-0.4-31.el8.noarch", "Installed: python3-inotify-0.9.6-13.el8.noarch", "Installed: python3-subscription-manager-rhsm-1.28.42-1.el8.x86_64", "Installed: python3-librepo-1.14.2-5.el8.x86_64", "Installed: python3-cloud-what-1.28.42-1.el8.x86_64", "Installed: usermode-1.113-2.el8.x86_64", "Installed: python3-ethtool-0.14-5.el8.x86_64" ] } lsrpackages: subscription-manager TASK [fedora.linux_system_roles.rhc : Get subscription status] ***************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:10 Saturday 08 November 2025 14:22:29 -0500 (0:00:04.741) 0:01:57.802 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not rhc_release is none", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Call subscription-manager] *************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23 Saturday 08 November 2025 14:22:29 -0500 (0:00:00.013) 0:01:57.815 ***** An exception occurred during task execution. To see the full traceback, use -vvv. 
The error was: SyntaxError: future feature annotations is not defined fatal: [managed-node2]: FAILED! => { "changed": false, "rc": 1 } MSG: MODULE FAILURE See stdout/stderr for the exact error MODULE_STDOUT: Traceback (most recent call last): File "/root/.ansible/tmp/ansible-tmp-1762629750.0417087-10217-119684284293845/AnsiballZ_redhat_subscription.py", line 107, in _ansiballz_main() File "/root/.ansible/tmp/ansible-tmp-1762629750.0417087-10217-119684284293845/AnsiballZ_redhat_subscription.py", line 99, in _ansiballz_main invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS) File "/root/.ansible/tmp/ansible-tmp-1762629750.0417087-10217-119684284293845/AnsiballZ_redhat_subscription.py", line 48, in invoke_module run_name='__main__', alter_sys=True) File "/usr/lib64/python3.6/runpy.py", line 201, in run_module mod_name, mod_spec, code = _get_module_details(mod_name) File "/usr/lib64/python3.6/runpy.py", line 128, in _get_module_details spec = importlib.util.find_spec(mod_name) File "/usr/lib64/python3.6/importlib/util.py", line 89, in find_spec return _find_spec(fullname, parent.__path__) File "", line 894, in _find_spec File "", line 1157, in find_spec File "", line 1131, in _get_spec File "", line 1112, in _legacy_get_spec File "", line 441, in spec_from_loader File "", line 544, in spec_from_file_location File "/tmp/ansible_community.general.redhat_subscription_payload_crgr8z40/ansible_community.general.redhat_subscription_payload.zip/ansible_collections/community/general/plugins/modules/redhat_subscription.py", line 8 SyntaxError: future feature annotations is not defined MODULE_STDERR: Shared connection to 10.31.43.199 closed. TASK [Assert registration failed] ********************************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:59 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.482) 0:01:58.298 ***** ok: [managed-node2] => { "changed": false } MSG: All assertions passed TASK [Register (with existing environments)] *********************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:65 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.014) 0:01:58.313 ***** TASK [fedora.linux_system_roles.rhc : Set ansible_facts required by role] ****** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:3 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.017) 0:01:58.330 ***** included: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.rhc : Ensure ansible_facts used by role] ******* task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:3 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.015) 0:01:58.345 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "__rhc_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.rhc : Check if system is ostree] *************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:11 Saturday 08 November 2025 14:22:30 -0500 (0:00:00.012) 0:01:58.358 ***** skipping: [managed-node2] => { "changed": false, "false_condition": "not __rhc_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK 
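This failure is a parse error, not a runtime error. The target executes modules under /usr/libexec/platform-python (Python 3.6, per the /usr/lib64/python3.6 frames above), and line 8 of community.general's redhat_subscription module is almost certainly "from __future__ import annotations", a future feature that only exists from Python 3.7 on (PEP 563); on 3.6 the import itself raises exactly this SyntaxError before any module code runs. The earlier warning that community.general does not support ansible-core 2.16.14 points the same way: the installed collection release is newer than this controller/target pair can handle. A hedged remediation sketch, assuming the 10.x series was the last one supporting ansible-core 2.16 and Python 3.6 targets (verify the exact bound against the collection's changelog before relying on it):

    # requirements.yml — sketch, not part of this test run
    collections:
      - name: community.general
        version: "<11.0.0"   # assumed bound; check the changelog

Running "from __future__ import annotations" under any Python 3.6 interpreter reproduces the error in isolation; the same failure will recur below for every redhat_subscription invocation in this play.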
TASK [Assert registration failed] **********************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:59
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.482) 0:01:58.298 *****
ok: [managed-node2] => {"changed": false}
MSG:
All assertions passed

TASK [Register (with existing environments)] ***********************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:65
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.014) 0:01:58.313 *****

TASK [fedora.linux_system_roles.rhc : Set ansible_facts required by role] ******
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:3
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.017) 0:01:58.330 *****
included: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.rhc : Ensure ansible_facts used by role] *******
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:3
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.015) 0:01:58.345 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "__rhc_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Check if system is ostree] ***************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:11
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.012) 0:01:58.358 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "not __rhc_is_ostree is defined", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Set flag to indicate system is ostree] ***
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:16
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.011) 0:01:58.369 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "not __rhc_is_ostree is defined", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Check if insights-packages are installed] ***
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:20
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.012) 0:01:58.381 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "ansible_distribution == \"RedHat\"", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Handle insights unregistration] **********
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:6
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.010) 0:01:58.392 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "ansible_distribution == \"RedHat\"", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Handle system subscription] **************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:15
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.010) 0:01:58.402 *****
included: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml for managed-node2

TASK [fedora.linux_system_roles.rhc : Ensure required packages are installed] ***
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3
Saturday 08 November 2025 14:22:30 -0500 (0:00:00.018) 0:01:58.421 *****
ok: [managed-node2] => {"changed": false, "rc": 0, "results": []}
MSG:
Nothing to do
lsrpackages: subscription-manager

TASK [fedora.linux_system_roles.rhc : Get subscription status] *****************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:10
Saturday 08 November 2025 14:22:33 -0500 (0:00:02.932) 0:02:01.354 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "not rhc_release is none", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Call subscription-manager] ***************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23
Saturday 08 November 2025 14:22:33 -0500 (0:00:00.014) 0:02:01.368 *****
An exception occurred during task execution. To see the full traceback, use -vvv.
The error was: SyntaxError: future feature annotations is not defined
fatal: [managed-node2]: FAILED! => {"changed": false, "rc": 1}
MSG:
MODULE FAILURE
See stdout/stderr for the exact error
MODULE_STDOUT:
Traceback (most recent call last):
  File "/root/.ansible/tmp/ansible-tmp-1762629753.593116-10275-94820805282821/AnsiballZ_redhat_subscription.py", line 107, in <module>
    _ansiballz_main()
  File "/root/.ansible/tmp/ansible-tmp-1762629753.593116-10275-94820805282821/AnsiballZ_redhat_subscription.py", line 99, in _ansiballz_main
    invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS)
  File "/root/.ansible/tmp/ansible-tmp-1762629753.593116-10275-94820805282821/AnsiballZ_redhat_subscription.py", line 48, in invoke_module
    run_name='__main__', alter_sys=True)
  File "/usr/lib64/python3.6/runpy.py", line 201, in run_module
    mod_name, mod_spec, code = _get_module_details(mod_name)
  File "/usr/lib64/python3.6/runpy.py", line 128, in _get_module_details
    spec = importlib.util.find_spec(mod_name)
  File "/usr/lib64/python3.6/importlib/util.py", line 89, in find_spec
    return _find_spec(fullname, parent.__path__)
  File "<frozen importlib._bootstrap>", line 894, in _find_spec
  File "<frozen importlib._bootstrap_external>", line 1157, in find_spec
  File "<frozen importlib._bootstrap_external>", line 1131, in _get_spec
  File "<frozen importlib._bootstrap_external>", line 1112, in _legacy_get_spec
  File "<frozen importlib._bootstrap>", line 441, in spec_from_loader
  File "<frozen importlib._bootstrap_external>", line 544, in spec_from_file_location
  File "/tmp/ansible_community.general.redhat_subscription_payload_io3_if60/ansible_community.general.redhat_subscription_payload.zip/ansible_collections/community/general/plugins/modules/redhat_subscription.py", line 8
SyntaxError: future feature annotations is not defined
MODULE_STDERR:
Shared connection to 10.31.43.199 closed.
TASK [Unregister] **************************************************************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:105
Saturday 08 November 2025 14:22:33 -0500 (0:00:00.021) 0:02:01.741 *****

TASK [fedora.linux_system_roles.rhc : Set ansible_facts required by role] ******
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:3
Saturday 08 November 2025 14:22:33 -0500 (0:00:00.014) 0:02:01.763 *****
included: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.rhc : Ensure ansible_facts used by role] *******
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:3
Saturday 08 November 2025 14:22:33 -0500 (0:00:00.013) 0:02:01.778 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "__rhc_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Check if system is ostree] ***************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:11
Saturday 08 November 2025 14:22:33 -0500 (0:00:00.012) 0:02:01.791 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "not __rhc_is_ostree is defined", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Set flag to indicate system is ostree] ***
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:16
Saturday 08 November 2025 14:22:33 -0500 (0:00:00.012) 0:02:01.804 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "not __rhc_is_ostree is defined", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Check if insights-packages are installed] ***
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/set_vars.yml:20
Saturday 08 November 2025 14:22:33 -0500 (0:00:00.010) 0:02:01.815 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "ansible_distribution == \"RedHat\"", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Handle insights unregistration] **********
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:6
Saturday 08 November 2025 14:22:33 -0500 (0:00:00.009) 0:02:01.824 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "ansible_distribution == \"RedHat\"", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Handle system subscription] **************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/main.yml:15
Saturday 08 November 2025 14:22:33 -0500 (0:00:00.010) 0:02:01.835 *****
included: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml for managed-node2

TASK [fedora.linux_system_roles.rhc : Ensure required packages are installed] ***
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3
Saturday 08 November 2025 14:22:34 -0500 (0:00:00.018) 0:02:01.854 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "rhc_state | d('present') == \"present\"", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Get subscription status] *****************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:10
Saturday 08 November 2025 14:22:34 -0500 (0:00:00.011) 0:02:01.865 *****
skipping: [managed-node2] => {"changed": false, "false_condition": "rhc_state | d(\"present\") == \"present\"", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.rhc : Call subscription-manager] ***************
task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23
Saturday 08 November 2025 14:22:34 -0500 (0:00:00.010) 0:02:01.876 *****
An exception occurred during task execution. To see the full traceback, use -vvv.
The error was: SyntaxError: future feature annotations is not defined
=> { "changed": false, "rc": 1 } MSG: MODULE FAILURE See stdout/stderr for the exact error MODULE_STDOUT: Traceback (most recent call last): File "/root/.ansible/tmp/ansible-tmp-1762629754.086123-10293-35901109732132/AnsiballZ_redhat_subscription.py", line 107, in _ansiballz_main() File "/root/.ansible/tmp/ansible-tmp-1762629754.086123-10293-35901109732132/AnsiballZ_redhat_subscription.py", line 99, in _ansiballz_main invoke_module(zipped_mod, temp_path, ANSIBALLZ_PARAMS) File "/root/.ansible/tmp/ansible-tmp-1762629754.086123-10293-35901109732132/AnsiballZ_redhat_subscription.py", line 48, in invoke_module run_name='__main__', alter_sys=True) File "/usr/lib64/python3.6/runpy.py", line 201, in run_module mod_name, mod_spec, code = _get_module_details(mod_name) File "/usr/lib64/python3.6/runpy.py", line 128, in _get_module_details spec = importlib.util.find_spec(mod_name) File "/usr/lib64/python3.6/importlib/util.py", line 89, in find_spec return _find_spec(fullname, parent.__path__) File "", line 894, in _find_spec File "", line 1157, in find_spec File "", line 1131, in _get_spec File "", line 1112, in _legacy_get_spec File "", line 441, in spec_from_loader File "", line 544, in spec_from_file_location File "/tmp/ansible_community.general.redhat_subscription_payload_l2_bfgo_/ansible_community.general.redhat_subscription_payload.zip/ansible_collections/community/general/plugins/modules/redhat_subscription.py", line 8 SyntaxError: future feature annotations is not defined MODULE_STDERR: Shared connection to 10.31.43.199 closed. TASK [Clean up Candlepin container] ******************************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:111 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.358) 0:02:02.234 ***** included: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml for managed-node2 TASK [Check if the candlepin container exists] ********************************* task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml:6 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.021) 0:02:02.256 ***** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a", "--filter", "name=candlepin" ], "delta": "0:00:00.037175", "end": "2025-11-08 14:22:34.737069", "rc": 0, "start": "2025-11-08 14:22:34.699894" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 22b35e80ac6d ghcr.io/candlepin/candlepin-unofficial:latest /sbin/init 41 seconds ago Up 40 seconds 0.0.0.0:8080->8080/tcp, 0.0.0.0:8443->8443/tcp candlepin TASK [Ensure that Candlepin container doesn't exist] *************************** task path: /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml:17 Saturday 08 November 2025 14:22:34 -0500 (0:00:00.375) 0:02:02.632 ***** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "stop", "candlepin" ], "delta": "0:00:01.023792", "end": "2025-11-08 14:22:36.099109", "rc": 0, "start": "2025-11-08 14:22:35.075317" } STDOUT: candlepin PLAY RECAP ********************************************************************* managed-node2 : ok=34 changed=6 unreachable=0 failed=2 skipped=22 rescued=1 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.16.14", "end_time": "2025-11-08T19:22:30.447340+00:00Z", "host": "managed-node2", "message": "MODULE FAILURE\nSee stdout/stderr for the exact error", "rc": 1, "start_time": 
"2025-11-08T19:22:29.967149+00:00Z", "task_name": "Call subscription-manager", "task_path": "/tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23" }, { "ansible_version": "2.16.14", "end_time": "2025-11-08T19:22:33.890469+00:00Z", "host": "managed-node2", "message": "MODULE FAILURE\nSee stdout/stderr for the exact error", "rc": 1, "start_time": "2025-11-08T19:22:33.519634+00:00Z", "task_name": "Call subscription-manager", "task_path": "/tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23" }, { "ansible_version": "2.16.14", "end_time": "2025-11-08T19:22:34.383126+00:00Z", "host": "managed-node2", "message": "MODULE FAILURE\nSee stdout/stderr for the exact error", "rc": 1, "start_time": "2025-11-08T19:22:34.027532+00:00Z", "task_name": "Call subscription-manager", "task_path": "/tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Saturday 08 November 2025 14:22:36 -0500 (0:00:01.379) 0:02:04.011 ***** =============================================================================== Install needed packages ------------------------------------------------ 61.85s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:31 Start Candlepin container ---------------------------------------------- 16.17s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:41 Wait for started Candlepin --------------------------------------------- 12.83s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:108 fedora.linux_system_roles.rhc : Ensure required packages are installed --- 4.74s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 Ensure directories exist ------------------------------------------------ 4.21s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:62 Update system certificates store ---------------------------------------- 3.45s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:101 fedora.linux_system_roles.rhc : Ensure required packages are installed --- 2.93s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:3 Copy product certificates ----------------------------------------------- 2.55s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:72 Copy Candlepin CA certificate for subscription-manager ------------------ 2.47s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:83 Check if the candlepin container exists --------------------------------- 2.02s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml:6 Copy Candlepin CA certificate for system -------------------------------- 1.80s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:92 Add environments -------------------------------------------------------- 1.77s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:127 Ensure that Candlepin 
container doesn't exist --------------------------- 1.38s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/teardown_candlepin.yml:17 Get facts for external test data ---------------------------------------- 0.67s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:9 Check if system is ostree ----------------------------------------------- 0.66s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_test_data.yml:32 Install GPG key for RPM repositories ------------------------------------ 0.64s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:118 Check Candlepin works --------------------------------------------------- 0.53s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/check_candlepin.yml:3 fedora.linux_system_roles.rhc : Call subscription-manager --------------- 0.48s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/roles/rhc/tasks/subscription-manager.yml:23 Ensure ansible_facts used by the test ----------------------------------- 0.46s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tests_environments.yml:24 Add candlepin hostname to /etc/hosts ------------------------------------ 0.44s /tmp/collections-uHD/ansible_collections/fedora/linux_system_roles/tests/rhc/tasks/setup_candlepin.yml:25 -- Logs begin at Sat 2025-11-08 14:16:42 EST, end at Sat 2025-11-08 14:22:36 EST. -- Nov 08 14:20:32 managed-node2 sshd[7074]: Accepted publickey for root from 10.31.11.198 port 57290 ssh2: ECDSA SHA256:ZOWxvWyD2xOL3Y/Da/RsGPTAcDLp0Rkxb0pNWp6N9Xg Nov 08 14:20:32 managed-node2 systemd[1]: Started Session 8 of user root. -- Subject: Unit session-8.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-8.scope has finished starting up. -- -- The start-up result is done. Nov 08 14:20:32 managed-node2 systemd-logind[594]: New session 8 of user root. -- Subject: A new session 8 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 8 has been created for the user root. -- -- The leading process of the session is 7074. 
Nov 08 14:20:32 managed-node2 sshd[7074]: pam_unix(sshd:session): session opened for user root by (uid=0)
Nov 08 14:20:32 managed-node2 sudo[7219]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-feciruvdjkcepszynrvdpowpqzpxtxhp ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629632.5489712-7809-113326970684502/AnsiballZ_stat.py'
Nov 08 14:20:32 managed-node2 sudo[7219]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:20:32 managed-node2 platform-python[7222]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Nov 08 14:20:32 managed-node2 sudo[7219]: pam_unix(sudo:session): session closed for user root
Nov 08 14:20:33 managed-node2 sudo[7345]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rqxzbysqztexccunisruayuytqgrovgm ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629633.1013393-7826-86243996407035/AnsiballZ_setup.py'
Nov 08 14:20:33 managed-node2 sudo[7345]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:20:33 managed-node2 platform-python[7348]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Nov 08 14:20:33 managed-node2 sudo[7345]: pam_unix(sudo:session): session closed for user root
Nov 08 14:20:34 managed-node2 sudo[7475]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tvjmvcjvbfbunafosouqujkbqmjwrebr ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629633.8423815-7843-132735495566975/AnsiballZ_lineinfile.py'
Nov 08 14:20:34 managed-node2 sudo[7475]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:20:34 managed-node2 platform-python[7478]: ansible-lineinfile Invoked with path=/etc/hosts line=127.0.0.1 candlepin.local regexp=.*candlepin.local state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 08 14:20:34 managed-node2 sudo[7475]: pam_unix(sudo:session): session closed for user root
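That lineinfile journal entry is the exact invocation behind the "Add candlepin hostname to /etc/hosts" task seen earlier; reconstructed as a task, using only the non-default parameters shown in the journal:

    - name: Add candlepin hostname to /etc/hosts
      ansible.builtin.lineinfile:
        path: /etc/hosts
        line: 127.0.0.1 candlepin.local
        regexp: .*candlepin.local
        state: present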
Nov 08 14:20:34 managed-node2 sudo[7601]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ypjagobzumsskmlrrdbkbyujjkjasqss ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629634.2863293-7851-91528192231875/AnsiballZ_setup.py'
Nov 08 14:20:34 managed-node2 sudo[7601]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:20:34 managed-node2 platform-python[7604]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Nov 08 14:20:34 managed-node2 sudo[7601]: pam_unix(sudo:session): session closed for user root
Nov 08 14:20:34 managed-node2 sudo[7672]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nfoxkinrsdmqfwzqkpbggdgghtavctoz ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629634.2863293-7851-91528192231875/AnsiballZ_dnf.py'
Nov 08 14:20:34 managed-node2 sudo[7672]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:20:35 managed-node2 platform-python[7675]: ansible-ansible.legacy.dnf Invoked with name=['podman'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Nov 08 14:20:51 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:20:52 managed-node2 setsebool[7755]: The virt_use_nfs policy boolean was changed to 1 by root
Nov 08 14:20:52 managed-node2 setsebool[7755]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root
Nov 08 14:21:08 managed-node2 kernel: SELinux: Converting 360 SID table entries...
Nov 08 14:21:08 managed-node2 kernel: SELinux: policy capability network_peer_controls=1
Nov 08 14:21:08 managed-node2 kernel: SELinux: policy capability open_perms=1
Nov 08 14:21:08 managed-node2 kernel: SELinux: policy capability extended_socket_class=1
Nov 08 14:21:08 managed-node2 kernel: SELinux: policy capability always_check_network=0
Nov 08 14:21:08 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1
Nov 08 14:21:08 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1
Nov 08 14:21:08 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:21:09 managed-node2 kernel: fuse: init (API version 7.34)
Nov 08 14:21:09 managed-node2 systemd[1]: Mounting FUSE Control File System...
-- Subject: Unit sys-fs-fuse-connections.mount has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit sys-fs-fuse-connections.mount has begun starting up.
Nov 08 14:21:09 managed-node2 systemd[1]: Mounted FUSE Control File System.
-- Subject: Unit sys-fs-fuse-connections.mount has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit sys-fs-fuse-connections.mount has finished starting up.
--
-- The start-up result is done.
Nov 08 14:21:10 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:21:10 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:21:35 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
-- Subject: Unit run-r0f88d2f6c59b49e79c5e09f49690c693.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit run-r0f88d2f6c59b49e79c5e09f49690c693.service has finished starting up.
--
-- The start-up result is done.
Nov 08 14:21:35 managed-node2 systemd[1]: cgroup compatibility translation between legacy and unified hierarchy settings activated. See cgroup-compat debug messages for details.
Nov 08 14:21:35 managed-node2 systemd[1]: Starting man-db-cache-update.service...
-- Subject: Unit man-db-cache-update.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit man-db-cache-update.service has begun starting up.
Nov 08 14:21:35 managed-node2 systemd[1]: Reloading.
Nov 08 14:21:36 managed-node2 sudo[7672]: pam_unix(sudo:session): session closed for user root
Nov 08 14:21:36 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit man-db-cache-update.service has successfully entered the 'dead' state.
Nov 08 14:21:36 managed-node2 systemd[1]: Started man-db-cache-update.service.
-- Subject: Unit man-db-cache-update.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit man-db-cache-update.service has finished starting up.
--
-- The start-up result is done.
Nov 08 14:21:36 managed-node2 systemd[1]: run-r0f88d2f6c59b49e79c5e09f49690c693.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-r0f88d2f6c59b49e79c5e09f49690c693.service has successfully entered the 'dead' state.
Nov 08 14:21:36 managed-node2 sudo[10231]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jwikugndmazkjffwrsiuypokmyzonzts ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629696.1727047-9627-278520668977616/AnsiballZ_command.py'
Nov 08 14:21:36 managed-node2 sudo[10231]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:21:36 managed-node2 platform-python[10234]: ansible-ansible.legacy.command Invoked with argv=['podman', 'ps', '-a', '--filter', 'name=candlepin'] _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None chdir=None executable=None creates=None removes=None stdin=None
Nov 08 14:21:38 managed-node2 kernel: evm: overlay not supported
Nov 08 14:21:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Nov 08 14:21:38 managed-node2 sudo[10231]: pam_unix(sudo:session): session closed for user root
Nov 08 14:21:38 managed-node2 sudo[10366]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nprklycktkngjommzkxlvzvhdlsfmjeg ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629698.2285655-9650-197196704608691/AnsiballZ_command.py'
Nov 08 14:21:38 managed-node2 sudo[10366]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:21:38 managed-node2 platform-python[10369]: ansible-ansible.legacy.command Invoked with argv=['podman', 'run', '--rm', '--detach', '--hostname', 'candlepin.local', '--name', 'candlepin', '--publish', '8443:8443', '--publish', '8080:8080', 'ghcr.io/candlepin/candlepin-unofficial'] _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None chdir=None executable=None creates=None removes=None stdin=None
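That last journal entry is the verbatim argv of the container start, mirroring the "Start Candlepin container" task result earlier in the playbook output; as a task it would look like the sketch below (argv copied from the log; everything else default):

    - name: Start Candlepin container
      ansible.builtin.command:
        argv:
          - podman
          - run
          - --rm
          - --detach
          - --hostname
          - candlepin.local
          - --name
          - candlepin
          - --publish
          - "8443:8443"
          - --publish
          - "8080:8080"
          - ghcr.io/candlepin/candlepin-unofficial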
Nov 08 14:21:53 managed-node2 systemd-udevd[10404]: Could not generate persistent MAC address for cni-podman0: No such file or directory Nov 08 14:21:53 managed-node2 systemd-udevd[10407]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Nov 08 14:21:53 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): veth19774ab7: link is not ready Nov 08 14:21:53 managed-node2 systemd-udevd[10407]: Could not generate persistent MAC address for veth19774ab7: No such file or directory Nov 08 14:21:53 managed-node2 kernel: cni-podman0: port 1(veth19774ab7) entered blocking state Nov 08 14:21:53 managed-node2 kernel: cni-podman0: port 1(veth19774ab7) entered disabled state Nov 08 14:21:53 managed-node2 kernel: device veth19774ab7 entered promiscuous mode Nov 08 14:21:53 managed-node2 dbus-daemon[595]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=661 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5425] manager: (veth19774ab7): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5454] device (cni-podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5459] device (cni-podman0): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5466] device (cni-podman0): Activation: starting connection 'cni-podman0' (78dd12fc-9dad-41e6-8ec1-71502926505e) Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5467] device (cni-podman0): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5469] device (cni-podman0): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5471] device (cni-podman0): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5473] device (cni-podman0): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Nov 08 14:21:53 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Nov 08 14:21:53 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Nov 08 14:21:53 managed-node2 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth19774ab7: link becomes ready Nov 08 14:21:53 managed-node2 kernel: cni-podman0: port 1(veth19774ab7) entered blocking state Nov 08 14:21:53 managed-node2 kernel: cni-podman0: port 1(veth19774ab7) entered forwarding state Nov 08 14:21:53 managed-node2 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. 
Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5893] device (veth19774ab7): carrier: link connected
Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.5896] device (cni-podman0): carrier: link connected
Nov 08 14:21:53 managed-node2 dbus-daemon[595]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Nov 08 14:21:53 managed-node2 systemd[1]: Started Network Manager Script Dispatcher Service.
-- Subject: Unit NetworkManager-dispatcher.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit NetworkManager-dispatcher.service has finished starting up.
--
-- The start-up result is done.
Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.6065] device (cni-podman0): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external')
Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.6068] device (cni-podman0): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external')
Nov 08 14:21:53 managed-node2 NetworkManager[661]: [1762629713.6072] device (cni-podman0): Activation: successful, device activated.
Nov 08 14:21:54 managed-node2 systemd[1]: Created slice machine.slice.
-- Subject: Unit machine.slice has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit machine.slice has finished starting up.
--
-- The start-up result is done.
Nov 08 14:21:54 managed-node2 systemd[1]: Started libpod-conmon-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope.
-- Subject: Unit libpod-conmon-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-conmon-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope has finished starting up.
--
-- The start-up result is done.
Nov 08 14:21:54 managed-node2 systemd[1]: Started libcontainer container 22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.
-- Subject: Unit libpod-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit libpod-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope has finished starting up.
--
-- The start-up result is done.
Nov 08 14:21:54 managed-node2 systemd[1]: run-runc-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa-runc.vsCFCJ.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-runc-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa-runc.vsCFCJ.mount has successfully entered the 'dead' state.
Nov 08 14:21:54 managed-node2 sudo[10366]: pam_unix(sudo:session): session closed for user root
Nov 08 14:21:54 managed-node2 sudo[10782]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hxhytvoysoyykjwbymupjlmsfxynrdrw ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629714.433387-9770-78386727662907/AnsiballZ_file.py'
Nov 08 14:21:54 managed-node2 sudo[10782]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:21:55 managed-node2 platform-python[10785]: ansible-file Invoked with path=/etc/pki/product state=directory mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 08 14:21:55 managed-node2 sudo[10782]: pam_unix(sudo:session): session closed for user root
Nov 08 14:21:56 managed-node2 sudo[10909]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tinjlluybyxjgkgmeayzjdaurriletif ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629715.3637688-9770-237228015973244/AnsiballZ_file.py'
Nov 08 14:21:56 managed-node2 sudo[10909]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:21:56 managed-node2 platform-python[10912]: ansible-file Invoked with path=/etc/pki/product-default state=directory mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 08 14:21:56 managed-node2 sudo[10909]: pam_unix(sudo:session): session closed for user root
Nov 08 14:21:57 managed-node2 sudo[11035]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwcthrryofefivsqkfgqwohmjubfblnl ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629716.8702433-9770-67920657985771/AnsiballZ_file.py'
Nov 08 14:21:57 managed-node2 sudo[11035]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:21:58 managed-node2 platform-python[11038]: ansible-file Invoked with path=/etc/rhsm/ca state=directory mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 08 14:21:58 managed-node2 sudo[11035]: pam_unix(sudo:session): session closed for user root
Nov 08 14:21:59 managed-node2 sudo[11161]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mrbrvbkkxajedgqzygsyhflowijtvowi ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629718.7913015-9838-111552025631472/AnsiballZ_command.py'
Nov 08 14:21:59 managed-node2 sudo[11161]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:00 managed-node2 platform-python[11164]: ansible-ansible.legacy.command Invoked with argv=['podman', 'cp', 'candlepin:/home/candlepin/devel/candlepin/generated_certs/7050.pem', '/etc/pki/product-default/'] _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None chdir=None executable=None creates=None removes=None stdin=None
Nov 08 14:22:00 managed-node2 sudo[11161]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:02 managed-node2 sudo[11323]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gxvfqelhengegkymtesssvtppxenlycy ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629721.4366794-9867-191476916873619/AnsiballZ_command.py'
Nov 08 14:22:02 managed-node2 sudo[11323]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:02 managed-node2 platform-python[11326]: ansible-ansible.legacy.command Invoked with argv=['podman', 'cp', 'candlepin:/etc/candlepin/certs/candlepin-ca.crt', '/etc/rhsm/ca/candlepin-ca.pem'] _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None chdir=None executable=None creates=None removes=None stdin=None
Nov 08 14:22:03 managed-node2 sudo[11323]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:03 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Nov 08 14:22:04 managed-node2 sudo[11488]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qxvilgufcxtzdacphurffmfueydwhuhh ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629723.776494-9889-3056431306250/AnsiballZ_command.py'
Nov 08 14:22:04 managed-node2 sudo[11488]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:04 managed-node2 platform-python[11491]: ansible-ansible.legacy.command Invoked with argv=['podman', 'cp', 'candlepin:/etc/candlepin/certs/candlepin-ca.crt', '/etc/pki/ca-trust/source/anchors/candlepin-ca.pem'] _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None chdir=None executable=None creates=None removes=None stdin=None
Nov 08 14:22:05 managed-node2 sudo[11488]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:06 managed-node2 sudo[11651]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-opqchrhtjhdoetdqjvqzmbwttkzzjfzs ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629725.5513337-9911-257669021964786/AnsiballZ_command.py'
Nov 08 14:22:06 managed-node2 sudo[11651]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:06 managed-node2 platform-python[11654]: ansible-ansible.legacy.command Invoked with argv=['update-ca-trust', 'extract'] _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None chdir=None executable=None creates=None removes=None stdin=None
Nov 08 14:22:08 managed-node2 sudo[11651]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:10 managed-node2 sudo[11784]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nnppdaozkqkczmntszxhrjtgdtrizuuy ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629729.0613291-9940-25723249799290/AnsiballZ_uri.py'
Nov 08 14:22:10 managed-node2 sudo[11784]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:10 managed-node2 platform-python[11787]: ansible-ansible.legacy.uri Invoked with url=https://candlepin.local:8443/candlepin method=HEAD validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False body_format=raw return_content=False follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 08 14:22:21 managed-node2 sudo[11784]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:21 managed-node2 sudo[12007]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mbsjixzciirgxjybsvaydimszcsdqqhn ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629741.7135143-10077-30968526963278/AnsiballZ_get_url.py'
Nov 08 14:22:21 managed-node2 sudo[12007]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:22 managed-node2 platform-python[12010]: ansible-get_url Invoked with url=http://candlepin.local:8080/RPM-GPG-KEY-candlepin dest=/etc/pki/rpm-gpg/RPM-GPG-KEY-candlepin mode=0644 force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 08 14:22:22 managed-node2 sudo[12007]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:22 managed-node2 sudo[12135]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ucybksuhfpgeiiigvrjsbnjylotrsgzy ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629742.364075-10098-128201292251298/AnsiballZ_uri.py'
Nov 08 14:22:22 managed-node2 sudo[12135]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:22 managed-node2 platform-python[12138]: ansible-ansible.legacy.uri Invoked with url=https://candlepin.local:8443/candlepin/owners/donaldduck/environments method=POST url_username=doc url_password=NOT_LOGGING_PARAMETER body_format=json body={'name': 'Environment 1', 'description': 'The environment 1', 'id': 'envId1'} force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False return_content=False follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False client_cert=None client_key=None dest=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 08 14:22:23 managed-node2 sudo[12135]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:23 managed-node2 sudo[12263]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mxearfcxvwtmdqefyyiumxvbmobfyzia ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629743.4844284-10098-190971838632617/AnsiballZ_uri.py'
Nov 08 14:22:23 managed-node2 sudo[12263]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:23 managed-node2 platform-python[12266]: ansible-ansible.legacy.uri Invoked with url=https://candlepin.local:8443/candlepin/owners/donaldduck/environments method=POST url_username=doc url_password=NOT_LOGGING_PARAMETER body_format=json body={'name': 'Environment 2', 'description': 'The environment 2', 'id': 'envId2'} force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False return_content=False follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False client_cert=None client_key=None dest=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 08 14:22:24 managed-node2 sudo[12263]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:24 managed-node2 sudo[12391]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zecmicoitmwekstktcrlgtosjkufwywj ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629744.1054673-10130-69351361400653/AnsiballZ_uri.py'
Nov 08 14:22:24 managed-node2 sudo[12391]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:24 managed-node2 platform-python[12394]: ansible-ansible.legacy.uri Invoked with url=https://candlepin.local:8443/candlepin method=HEAD validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False body_format=raw return_content=False follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Nov 08 14:22:24 managed-node2 sudo[12391]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:24 managed-node2 sudo[12520]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ynaizgyhflhevnresvhhuozwkxdjfslz ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629744.6569889-10145-117993771399708/AnsiballZ_setup.py'
Nov 08 14:22:24 managed-node2 sudo[12520]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:25 managed-node2 platform-python[12523]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Nov 08 14:22:25 managed-node2 sudo[12520]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:25 managed-node2 sudo[12650]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oijmvhxlmorqgylqveoqbgqgzpvgrssz ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629745.2512803-10168-246514594664934/AnsiballZ_setup.py'
Nov 08 14:22:25 managed-node2 sudo[12650]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:25 managed-node2 platform-python[12653]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Nov 08 14:22:25 managed-node2 sudo[12650]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:25 managed-node2 sudo[12721]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-slezqzekjxklpihkytjbnnapjbgmgckw ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629745.2512803-10168-246514594664934/AnsiballZ_dnf.py'
Nov 08 14:22:25 managed-node2 sudo[12721]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:25 managed-node2 platform-python[12724]: ansible-ansible.legacy.dnf Invoked with name=['subscription-manager'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Nov 08 14:22:28 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:22:28 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:22:28 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:22:28 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:22:29 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:22:29 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:22:29 managed-node2 dbus-daemon[595]: [system] Reloaded configuration
Nov 08 14:22:29 managed-node2 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
-- Subject: Unit run-rdeaa4ca55e9c4f31ba4070d63d638b8a.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit run-rdeaa4ca55e9c4f31ba4070d63d638b8a.service has finished starting up.
--
-- The start-up result is done.
Nov 08 14:22:29 managed-node2 systemd[1]: Starting man-db-cache-update.service...
-- Subject: Unit man-db-cache-update.service has begun start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit man-db-cache-update.service has begun starting up.
Nov 08 14:22:29 managed-node2 systemd[1]: Reloading.
Nov 08 14:22:29 managed-node2 systemd[1]: man-db-cache-update.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit man-db-cache-update.service has successfully entered the 'dead' state.
Nov 08 14:22:29 managed-node2 systemd[1]: Started man-db-cache-update.service.
-- Subject: Unit man-db-cache-update.service has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit man-db-cache-update.service has finished starting up.
--
-- The start-up result is done.
Nov 08 14:22:29 managed-node2 systemd[1]: run-rdeaa4ca55e9c4f31ba4070d63d638b8a.service: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-rdeaa4ca55e9c4f31ba4070d63d638b8a.service has successfully entered the 'dead' state.
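
The stretch above stages trust for the Candlepin server (podman cp of candlepin-ca.crt into /etc/rhsm/ca and the ca-trust anchors, followed by update-ca-trust extract) and seeds test data by POSTing two environments to the owners/donaldduck/environments endpoint. Reconstructed from the logged uri arguments, each environment-creation step corresponds to a task roughly like this (a minimal sketch; the task name and the password variable are assumptions, since the log masks the credential):

- name: Create a Candlepin environment   # task name assumed
  ansible.builtin.uri:
    url: https://candlepin.local:8443/candlepin/owners/donaldduck/environments
    method: POST
    url_username: doc
    url_password: "{{ candlepin_password }}"   # assumed variable; the real value is not logged
    body_format: json
    body:
      id: envId2
      name: Environment 2
      description: The environment 2
    status_code: [200]
  become: true
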
Nov 08 14:22:29 managed-node2 sudo[12721]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:30 managed-node2 sudo[12989]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-webeisoegscgrmpfaziepqhmxtdxoftu ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629750.0417087-10217-119684284293845/AnsiballZ_redhat_subscription.py'
Nov 08 14:22:30 managed-node2 sudo[12989]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:30 managed-node2 sudo[12989]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:30 managed-node2 sudo[13115]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vzgfxkxhgobnjgwkedakpyihxhixafar ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629750.6104856-10241-18227112371785/AnsiballZ_setup.py'
Nov 08 14:22:30 managed-node2 sudo[13115]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:30 managed-node2 platform-python[13118]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Nov 08 14:22:31 managed-node2 sudo[13115]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:31 managed-node2 sudo[13186]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tsymagkengiyvezqiijhikafzuuqxrph ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629750.6104856-10241-18227112371785/AnsiballZ_dnf.py'
Nov 08 14:22:31 managed-node2 sudo[13186]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:31 managed-node2 platform-python[13189]: ansible-ansible.legacy.dnf Invoked with name=['subscription-manager'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Nov 08 14:22:33 managed-node2 sudo[13186]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:33 managed-node2 sudo[13313]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-brfpwmyksbavdpenzwkvcdinswkhiuvs ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629753.593116-10275-94820805282821/AnsiballZ_redhat_subscription.py'
Nov 08 14:22:33 managed-node2 sudo[13313]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:33 managed-node2 sudo[13313]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:34 managed-node2 sudo[13439]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qnxmatacgriywjkficlalrpcgwpelczf ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629754.086123-10293-35901109732132/AnsiballZ_redhat_subscription.py'
Nov 08 14:22:34 managed-node2 sudo[13439]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:34 managed-node2 sudo[13439]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:34 managed-node2 sudo[13565]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ythgojbgksiyvmmjeahuphdsiwfuzrbe ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629754.4433599-10309-117236669964463/AnsiballZ_command.py'
Nov 08 14:22:34 managed-node2 sudo[13565]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:34 managed-node2 platform-python[13568]: ansible-ansible.legacy.command Invoked with argv=['podman', 'ps', '-a', '--filter', 'name=candlepin'] _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None chdir=None executable=None creates=None removes=None stdin=None
Nov 08 14:22:34 managed-node2 sudo[13565]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:34 managed-node2 sudo[13698]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wtzjarbipszquvfowhldydrpzsvdasrt ; /usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1762629754.820674-10317-248906781858230/AnsiballZ_command.py'
Nov 08 14:22:34 managed-node2 sudo[13698]: pam_unix(sudo:session): session opened for user root by root(uid=0)
Nov 08 14:22:35 managed-node2 platform-python[13701]: ansible-ansible.legacy.command Invoked with argv=['podman', 'stop', 'candlepin'] _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None chdir=None executable=None creates=None removes=None stdin=None
Nov 08 14:22:35 managed-node2 systemd[1]: libpod-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope has successfully entered the 'dead' state.
Nov 08 14:22:35 managed-node2 systemd[1]: libpod-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope: Consumed 49.582s CPU time
-- Subject: Resources consumed by unit runtime
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope completed and consumed the indicated resources.
Nov 08 14:22:36 managed-node2 kernel: cni-podman0: port 1(veth19774ab7) entered disabled state
Nov 08 14:22:36 managed-node2 kernel: device veth19774ab7 left promiscuous mode
Nov 08 14:22:36 managed-node2 kernel: cni-podman0: port 1(veth19774ab7) entered disabled state
Nov 08 14:22:36 managed-node2 systemd[1]: run-netns-netns\x2dfb55c28f\x2d2049\x2db808\x2db64e\x2d9005954c3134.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit run-netns-netns\x2dfb55c28f\x2d2049\x2db808\x2db64e\x2d9005954c3134.mount has successfully entered the 'dead' state.
Nov 08 14:22:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa-userdata-shm.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay\x2dcontainers-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa-userdata-shm.mount has successfully entered the 'dead' state.
Nov 08 14:22:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay-90f267f1906ff626dc7c9dc049a0330080ad3fc600df17ac1a3ba1d7b1cc668a-merged.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay-90f267f1906ff626dc7c9dc049a0330080ad3fc600df17ac1a3ba1d7b1cc668a-merged.mount has successfully entered the 'dead' state.
Nov 08 14:22:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Nov 08 14:22:36 managed-node2 sudo[13698]: pam_unix(sudo:session): session closed for user root
Nov 08 14:22:36 managed-node2 systemd[1]: libpod-conmon-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit libpod-conmon-22b35e80ac6d2db5998d8c729153d77c68b3d23b414be8f192b70d0773c158aa.scope has successfully entered the 'dead' state.
Nov 08 14:22:36 managed-node2 sshd[13851]: Accepted publickey for root from 10.31.11.198 port 34588 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Nov 08 14:22:36 managed-node2 systemd-logind[594]: New session 9 of user root.
-- Subject: A new session 9 has been created for user root
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat
--
-- A new session with the ID 9 has been created for the user root.
--
-- The leading process of the session is 13851.
Nov 08 14:22:36 managed-node2 systemd[1]: Started Session 9 of user root.
-- Subject: Unit session-9.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit session-9.scope has finished starting up.
--
-- The start-up result is done.
Nov 08 14:22:36 managed-node2 sshd[13851]: pam_unix(sshd:session): session opened for user root by (uid=0)
Nov 08 14:22:36 managed-node2 sshd[13854]: Received disconnect from 10.31.11.198 port 34588:11: disconnected by user
Nov 08 14:22:36 managed-node2 sshd[13854]: Disconnected from user root 10.31.11.198 port 34588
Nov 08 14:22:36 managed-node2 sshd[13851]: pam_unix(sshd:session): session closed for user root
Nov 08 14:22:36 managed-node2 systemd[1]: session-9.scope: Succeeded.
-- Subject: Unit succeeded
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- The unit session-9.scope has successfully entered the 'dead' state.
Nov 08 14:22:36 managed-node2 systemd-logind[594]: Session 9 logged out. Waiting for processes to exit.
Nov 08 14:22:36 managed-node2 systemd-logind[594]: Removed session 9.
-- Subject: Session 9 has been terminated
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat
--
-- A session with the ID 9 has been terminated.
Nov 08 14:22:36 managed-node2 sshd[13875]: Accepted publickey for root from 10.31.11.198 port 34596 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Nov 08 14:22:36 managed-node2 systemd[1]: Started Session 10 of user root.
-- Subject: Unit session-10.scope has finished start-up
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
--
-- Unit session-10.scope has finished starting up.
--
-- The start-up result is done.
Nov 08 14:22:36 managed-node2 systemd-logind[594]: New session 10 of user root.
-- Subject: A new session 10 has been created for user root
-- Defined-By: systemd
-- Support: https://access.redhat.com/support
-- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat
--
-- A new session with the ID 10 has been created for the user root.
--
-- The leading process of the session is 13875.
Nov 08 14:22:36 managed-node2 sshd[13875]: pam_unix(sshd:session): session opened for user root by (uid=0)
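
The closing stretch installs subscription-manager with dnf, exercises the redhat_subscription module, and tears the Candlepin container down with podman stop, after which the controller reconnects over SSH for the next batch of tasks. Matched to the logged invocations, the install and teardown steps would be roughly (a minimal sketch; task names are assumptions):

- name: Ensure subscription-manager is installed   # task name assumed
  ansible.builtin.dnf:
    name: subscription-manager
    state: present
  become: true

- name: Stop the Candlepin test container   # task name assumed
  ansible.builtin.command:
    argv: [podman, stop, candlepin]
  become: true
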