# STDOUT: ---v---v---v---v---v---
ansible-playbook [core 2.16.0]
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /opt/ansible-2.16/lib/python3.11/site-packages/ansible
  ansible collection location = /WORKDIR/git-weekly-ciglljd_4n/.collection
  executable location = /opt/ansible-2.16/bin/ansible-playbook
  python version = 3.11.5 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/opt/ansible-2.16/bin/python)
  jinja version = 3.1.2
  libyaml = True
Using /etc/ansible/ansible.cfg as config file
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_sbd_all_options_play.yml ***************************************
2 plays in /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml:4
Saturday 23 March 2024 12:03:37 +0000 (0:00:00.013) 0:00:00.013 ********
ok: [sut] => { "ansible_facts": { "ha_cluster_hacluster_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n31303833633366333561656439323930303361333161363239346166656537323933313436\n3432386236656563343237306335323637396239616230353561330a313731623238393238\n62343064666336643930663239383936616465643134646536656532323461356237646133\n3761616633323839633232353637366266350a313163633236376666653238633435306565\n3264623032333736393535663833\n" } }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciglljd_4n/tests/vars/vault-variables.yml" ], "changed": false }

PLAY [Maximal SBD configuration (play)] ****************************************

TASK [Gathering Facts] *********************************************************
task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml:9
Saturday 23 March 2024 12:03:38 +0000 (0:00:00.014) 0:00:00.027 ********
ok: [sut]

TASK [Set up test environment] *************************************************
task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml:17
Saturday 23 March 2024 12:03:39 +0000 (0:00:01.008) 0:00:01.036 ********

TASK [fedora.linux_system_roles.ha_cluster : Set node name to 'localhost' for single-node clusters] ***
task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:9
Saturday 23 March 2024 12:03:39 +0000 (0:00:00.019) 0:00:01.055 ********
ok: [sut] => { "ansible_facts": { "inventory_hostname": "localhost" }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Ensure facts used by tests] *******
task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:14
Saturday 23 March 2024 12:03:39 +0000 (0:00:00.018) 0:00:01.074 ********
skipping: [sut] => { "changed": false, "false_condition": "'distribution' not in ansible_facts", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ********
task path:
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:22 Saturday 23 March 2024 12:03:39 +0000 (0:00:00.007) 0:00:01.082 ******** ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:27 Saturday 23 March 2024 12:03:39 +0000 (0:00:00.281) 0:00:01.364 ******** ok: [sut] => { "ansible_facts": { "__ha_cluster_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Do not try to enable RHEL repositories] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:32 Saturday 23 March 2024 12:03:39 +0000 (0:00:00.014) 0:00:01.378 ******** skipping: [sut] => { "changed": false, "false_condition": "ansible_distribution == 'RedHat'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Copy nss-altfiles ha_cluster users to /etc/passwd] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:41 Saturday 23 March 2024 12:03:39 +0000 (0:00:00.008) 0:00:01.386 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_is_ostree | d(false)", "skip_reason": "Conditional result was False" } TASK [Set up test environment for SBD] ***************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml:22 Saturday 23 March 2024 12:03:39 +0000 (0:00:00.018) 0:00:01.404 ******** TASK [fedora.linux_system_roles.ha_cluster : Load softdog module for SBD to have at least one watchdog] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup_sbd.yml:9 Saturday 23 March 2024 12:03:39 +0000 (0:00:00.017) 0:00:01.422 ******** skipping: [sut] => { "changed": false, "false_condition": "not (__test_disable_modprobe | d(false))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Create backing files for SBD devices] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup_sbd.yml:15 Saturday 23 March 2024 12:03:39 +0000 (0:00:00.018) 0:00:01.441 ******** changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/ansible.y5zhny0c_ha_cluster_tests", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Initialize backing files for SBD devices] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup_sbd.yml:21 Saturday 23 March 2024 12:03:39 +0000 (0:00:00.276) 0:00:01.718 ******** changed: [sut] => { "changed": true, "cmd": [ "dd", "if=/dev/zero", "of=/tmp/ansible.y5zhny0c_ha_cluster_tests", "bs=1M", "count=10" ], "delta": "0:00:00.081111", "end": "2024-03-23 12:03:40.045810", "rc": 0, "start": "2024-03-23 12:03:39.964699" } STDERR: 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.00491363 s, 2.1 GB/s TASK [fedora.linux_system_roles.ha_cluster : Mount SBD devices] **************** 
task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup_sbd.yml:26 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.376) 0:00:02.095 ******** changed: [sut] => { "changed": true, "cmd": [ "losetup", "--show", "--find", "/tmp/ansible.y5zhny0c_ha_cluster_tests" ], "delta": "0:00:00.006335", "end": "2024-03-23 12:03:40.289866", "rc": 0, "start": "2024-03-23 12:03:40.283531" } STDOUT: /dev/loop0 TASK [Generate SBD devices and watchdogs variables] **************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml:31 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.256) 0:00:02.351 ******** ok: [sut] => (item=sut) => { "ansible_facts": { "__test_node_options": [ { "node_name": "localhost", "sbd_devices": [ "/dev/loop0" ], "sbd_watchdog": "/dev/null", "sbd_watchdog_modules": [ "softdog" ], "sbd_watchdog_modules_blocklist": [ "iTCO_wdt" ] } ] }, "ansible_loop_var": "item", "changed": false, "item": "sut" } TASK [Run the role and assert results] ***************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml:46 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.023) 0:00:02.375 ******** included: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml for sut TASK [Ensure modprobe files are not present] *********************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:17 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.017) 0:00:02.392 ******** ok: [sut] => (item=/etc/modprobe.d/iTCO_wdt.conf) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/modprobe.d/iTCO_wdt.conf", "path": "/etc/modprobe.d/iTCO_wdt.conf", "state": "absent" } ok: [sut] => (item=/etc/modules-load.d/softdog.conf) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/modules-load.d/softdog.conf", "path": "/etc/modules-load.d/softdog.conf", "state": "absent" } TASK [Run HA Cluster role] ***************************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:25 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.514) 0:00:02.906 ******** TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:3 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.031) 0:00:02.937 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Ensure ansible_facts used by role] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:2 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.014) 0:00:02.952 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ******** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:10 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.018) 0:00:02.971 ******** skipping: [sut] => { "changed": false, "false_condition": "not __ha_cluster_is_ostree is defined", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:15 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.012) 0:00:02.984 ******** skipping: [sut] => { "changed": false, "false_condition": "not __ha_cluster_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:19 Saturday 23 March 2024 12:03:40 +0000 (0:00:00.011) 0:00:02.995 ******** ok: [sut] => (item=RedHat.yml) => { "ansible_facts": { "__ha_cluster_cloud_agents_packages": [], "__ha_cluster_fence_agent_packages_default": "{{ ['fence-agents-all'] + (['fence-virt'] if ansible_architecture == 'x86_64' else []) }}", "__ha_cluster_fullstack_node_packages": [ "corosync", "libknet1-plugins-all", "resource-agents", "pacemaker", "openssl" ], "__ha_cluster_pcs_provider": "pcs-0.10", "__ha_cluster_qdevice_node_packages": [ "corosync-qdevice", "bash", "coreutils", "curl", "grep", "nss-tools", "openssl", "sed" ], "__ha_cluster_repos": [], "__ha_cluster_role_essential_packages": [ "pcs", "corosync-qnetd" ], "__ha_cluster_sbd_packages": [ "sbd" ], "__ha_cluster_services": [ "corosync", "corosync-qdevice", "pacemaker" ] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "__ha_cluster_cloud_agents_packages": [] }, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_38.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_38.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_38.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "Fedora_38.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set Linux Pacemaker shell specific variables] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:34 Saturday 23 March 2024 12:03:41 +0000 (0:00:00.028) 0:00:03.023 ******** ok: [sut] => { "ansible_facts": {}, "ansible_included_var_files": [ "/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/shell_pcs.yml" ], "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Enable package repositories] ****** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:6 Saturday 23 March 2024 12:03:41 +0000 (0:00:00.013) 0:00:03.037 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Find platform/version specific 
tasks to enable repositories] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:3 Saturday 23 March 2024 12:03:41 +0000 (0:00:00.016) 0:00:03.054 ******** ok: [sut] => (item=RedHat.yml) => { "ansible_facts": { "__ha_cluster_enable_repo_tasks_file": "/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/RedHat.yml" }, "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "__ha_cluster_enable_repo_tasks_file": "/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/Fedora.yml" }, "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_38.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file", "item": "Fedora_38.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_38.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file", "item": "Fedora_38.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Run platform/version specific tasks to enable repositories] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:21 Saturday 23 March 2024 12:03:41 +0000 (0:00:00.026) 0:00:03.081 ******** TASK [fedora.linux_system_roles.ha_cluster : Install role essential packages] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11 Saturday 23 March 2024 12:03:41 +0000 (0:00:00.022) 0:00:03.104 ******** changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: rubygem-tilt-2.0.11-1.fc38.noarch", "Installed: pcs-0.11.7-3.fc38.noarch", "Installed: rubygem-puma-5.6.5-3.fc38.x86_64", "Installed: corosync-3.1.8-1.fc38.x86_64", "Installed: pacemaker-cli-2.1.7-4.fc38.x86_64", "Installed: libnozzle1-1.25-2.fc38.x86_64", "Installed: corosync-qnetd-3.0.3-2.fc38.x86_64", "Installed: corosynclib-3.1.8-1.fc38.x86_64", "Installed: python3-cffi-1.15.1-4.fc38.x86_64", "Installed: rubygem-mustermann-1.1.1-9.fc38.noarch", "Installed: bzip2-1.0.8-13.fc38.x86_64", "Installed: rubygem-rack-1:2.2.4-2.fc38.noarch", "Installed: resource-agents-4.13.0-1.fc38.x86_64", "Installed: rubygem-io-console-0.6.0-180.fc38.x86_64", "Installed: rubygem-rack-protection-3.0.5-2.fc38.noarch", "Installed: nss-tools-3.98.0-1.fc38.x86_64", "Installed: rubygem-rack-test-1.1.0-7.fc38.noarch", "Installed: rubygem-json-2.6.3-202.fc38.x86_64", "Installed: logrotate-3.21.0-2.fc38.x86_64", "Installed: pacemaker-2.1.7-4.fc38.x86_64", "Installed: perl-TimeDate-1:2.33-10.fc38.noarch", "Installed: libknet1-1.25-2.fc38.x86_64", "Installed: libknet1-compress-bzip2-plugin-1.25-2.fc38.x86_64", "Installed: libknet1-compress-lz4-plugin-1.25-2.fc38.x86_64", "Installed: libknet1-compress-lzma-plugin-1.25-2.fc38.x86_64", "Installed: libknet1-compress-lzo2-plugin-1.25-2.fc38.x86_64", "Installed: libknet1-compress-plugins-all-1.25-2.fc38.x86_64", "Installed: libknet1-compress-zlib-plugin-1.25-2.fc38.x86_64", "Installed: libknet1-crypto-nss-plugin-1.25-2.fc38.x86_64", "Installed: 
libknet1-crypto-openssl-plugin-1.25-2.fc38.x86_64", "Installed: libknet1-crypto-plugins-all-1.25-2.fc38.x86_64", "Installed: libknet1-compress-zstd-plugin-1.25-2.fc38.x86_64", "Installed: rubygem-nio4r-2.5.8-3.fc38.x86_64", "Installed: libknet1-plugins-all-1.25-2.fc38.x86_64", "Installed: python3-cryptography-37.0.2-8.fc38.x86_64", "Installed: pacemaker-cluster-libs-2.1.7-4.fc38.x86_64", "Installed: pacemaker-libs-2.1.7-4.fc38.x86_64", "Installed: pacemaker-schemas-2.1.7-4.fc38.noarch", "Installed: rubygem-backports-3.23.0-2.fc38.noarch", "Installed: python3-tornado-6.2.0-3.fc38.x86_64", "Installed: rubygem-rdoc-6.5.0-180.fc38.noarch", "Installed: ruby-3.2.2-180.fc38.x86_64", "Installed: rubygem-rexml-3.2.5-180.fc38.noarch", "Installed: rubygem-sinatra-1:3.0.5-2.fc38.noarch", "Installed: ruby-default-gems-3.2.2-180.fc38.noarch", "Installed: python3-psutil-5.9.5-1.fc38.x86_64", "Installed: ruby-libs-3.2.2-180.fc38.x86_64", "Installed: libqb-2.0.8-1.fc38.x86_64", "Installed: rubygem-bigdecimal-3.1.3-180.fc38.x86_64", "Installed: rubypick-1.1.1-18.fc38.noarch", "Installed: rubygem-ethon-0.15.0-4.fc38.noarch", "Installed: rubygem-bundler-2.4.10-180.fc38.noarch", "Installed: rubygem-psych-5.0.1-180.fc38.x86_64", "Installed: python3-pycparser-2.20-9.fc38.noarch", "Installed: python3-ply-3.11-18.fc38.noarch", "Installed: python3-pyparsing-3.0.9-3.fc38.noarch", "Installed: net-snmp-libs-1:5.9.4-1.fc38.x86_64", "Installed: rubygems-3.4.10-180.fc38.noarch", "Installed: rubygem-childprocess-4.1.0-4.fc38.noarch", "Installed: python3-pycurl-7.45.2-3.fc38.x86_64", "Installed: rubygem-ffi-1.15.5-6.fc38.x86_64", "Installed: lzo-2.10-8.fc38.x86_64" ] } lsrpackages: corosync-qnetd pcs TASK [fedora.linux_system_roles.ha_cluster : Check and prepare role variables] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:17 Saturday 23 March 2024 12:04:00 +0000 (0:00:19.189) 0:00:22.293 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Discover cluster node names] ****** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:3 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.025) 0:00:22.319 ******** ok: [sut] => { "ansible_facts": { "__ha_cluster_node_name": "localhost" }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Collect cluster node names] ******* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:7 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.021) 0:00:22.340 ******** ok: [sut] => { "ansible_facts": { "__ha_cluster_all_node_names": [ "localhost" ] }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_node_options contains unknown or duplicate nodes] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:16 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.022) 0:00:22.363 ******** skipping: [sut] => { "changed": false, "false_condition": "(\n __nodes_from_options != (__nodes_from_options | unique)\n) or (\n __nodes_from_options | 
difference(__ha_cluster_all_node_names)\n)\n", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Extract node options] ************* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:30 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.018) 0:00:22.382 ******** ok: [sut] => { "ansible_facts": { "__ha_cluster_local_node": { "node_name": "localhost", "sbd_devices": [ "/dev/loop0" ], "sbd_watchdog": "/dev/null", "sbd_watchdog_modules": [ "softdog" ], "sbd_watchdog_modules_blocklist": [ "iTCO_wdt" ] } }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if passwords are not specified] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:43 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.025) 0:00:22.407 ******** skipping: [sut] => (item=ha_cluster_hacluster_password) => { "ansible_loop_var": "item", "changed": false, "false_condition": "lookup(\"vars\", item, default=\"\") | string | length < 1", "item": "ha_cluster_hacluster_password", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Fail if nodes do not have the same number of SBD devices specified] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:53 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.032) 0:00:22.440 ******** skipping: [sut] => { "changed": false, "false_condition": "ansible_play_hosts | map('extract', hostvars,\n ['__ha_cluster_local_node', 'sbd_devices'])\n| map('default', [], true) | map('length') | unique | length > 1\n", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if configuring qnetd on a cluster node] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:69 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.022) 0:00:22.463 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_qnetd.present | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if no valid level is specified for a fencing level] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:79 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.018) 0:00:22.481 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Fail if no target is specified for a fencing level] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:87 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.008) 0:00:22.490 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Extract qdevice settings] ********* task path: 
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:101 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.008) 0:00:22.498 ******** ok: [sut] => { "ansible_facts": { "__ha_cluster_qdevice_host": "", "__ha_cluster_qdevice_in_use": false, "__ha_cluster_qdevice_model": "", "__ha_cluster_qdevice_pcs_address": "" }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Figure out if ATB needs to be enabled for SBD] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:110 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.027) 0:00:22.526 ******** ok: [sut] => { "ansible_facts": { "__ha_cluster_sbd_needs_atb": false }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if SBD needs ATB enabled and the user configured ATB to be disabled] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:120 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.024) 0:00:22.550 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_sbd_needs_atb | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_pcsd_public_key_src and ha_cluster_pcsd_private_key_src are set along with ha_cluster_pcsd_certificates] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:127 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.016) 0:00:22.567 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is not none", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities] *********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:141 Saturday 23 March 2024 12:04:00 +0000 (0:00:00.011) 0:00:22.578 ******** ok: [sut] => { "changed": false, "cmd": [ "pcs", "--version", "--full" ], "delta": "0:00:01.504358", "end": "2024-03-23 12:04:02.318481", "rc": 0, "start": "2024-03-23 12:04:00.814123" } STDOUT: 0.11.7 booth booth.enable-authfile.unset cluster.config.backup-local cluster.config.restore-cluster cluster.config.restore-local cluster.config.uuid cluster.create cluster.create.enable cluster.create.local cluster.create.no-keys-sync cluster.create.separated-name-and-address cluster.create.start cluster.create.start.wait cluster.create.transport.knet cluster.create.transport.udp-udpu cluster.create.transport.udp-udpu.no-rrp cluster.destroy cluster.destroy.all cluster.report cluster.verify corosync.authkey.update corosync.config.get corosync.config.get.struct corosync.config.reload corosync.config.sync-to-local-cluster corosync.config.update corosync.link.add corosync.link.remove corosync.link.remove.list corosync.link.update corosync.qdevice corosync.qdevice.model.net corosync.quorum corosync.quorum.device corosync.quorum.device.client.model.net.certificates.local corosync.quorum.device.heuristics corosync.quorum.device.model.net corosync.quorum.device.model.net.options_tls_and_kaptb corosync.quorum.set-expected-votes-runtime corosync.quorum.status 
corosync.quorum.unblock corosync.totem.block_unlisted_ips corosync.uidgid node.add node.add.enable node.add.separated-name-and-address node.add.start node.add.start.wait node.attributes node.attributes.set-list-for-node node.confirm-off node.fence node.guest node.kill node.maintenance node.maintenance.all node.maintenance.list node.maintenance.wait node.remote node.remote.onfail-demote node.remove node.remove-from-caches node.remove.list node.standby node.standby.all node.standby.list node.standby.wait node.start-stop-enable-disable node.start-stop-enable-disable.all node.start-stop-enable-disable.list node.start-stop-enable-disable.start-wait node.utilization node.utilization.set-list-for-node pcmk.acl.enable-disable pcmk.acl.group pcmk.acl.role pcmk.acl.role.create-with-permissions pcmk.acl.role.delete-with-users-groups pcmk.acl.user pcmk.alert pcmk.cib.checkpoints pcmk.cib.checkpoints.diff pcmk.cib.edit pcmk.cib.get pcmk.cib.get.scope pcmk.cib.roles.promoted-unpromoted pcmk.cib.set pcmk.constraint.colocation.set pcmk.constraint.colocation.set.options pcmk.constraint.colocation.simple pcmk.constraint.colocation.simple.options pcmk.constraint.config.output-formats pcmk.constraint.hide-expired pcmk.constraint.location.simple pcmk.constraint.location.simple.options pcmk.constraint.location.simple.resource-regexp pcmk.constraint.location.simple.rule pcmk.constraint.location.simple.rule.node-attr-type-number pcmk.constraint.location.simple.rule.options pcmk.constraint.location.simple.rule.rule-add-remove pcmk.constraint.no-autocorrect pcmk.constraint.order.set pcmk.constraint.order.set.options pcmk.constraint.order.simple pcmk.constraint.order.simple.options pcmk.constraint.ticket.set pcmk.constraint.ticket.set.options pcmk.constraint.ticket.simple pcmk.constraint.ticket.simple.constraint-id pcmk.properties.cluster pcmk.properties.cluster.config.output-formats pcmk.properties.cluster.defaults pcmk.properties.cluster.describe pcmk.properties.cluster.describe.output-formats pcmk.properties.cluster.set_properties.iso8601duration pcmk.properties.operation-defaults pcmk.properties.operation-defaults.multiple pcmk.properties.operation-defaults.rule pcmk.properties.operation-defaults.rule-rsc-op pcmk.properties.operation-defaults.rule.hide-expired pcmk.properties.operation-defaults.rule.node-attr-type-number pcmk.properties.resource-defaults pcmk.properties.resource-defaults.multiple pcmk.properties.resource-defaults.rule pcmk.properties.resource-defaults.rule-rsc-op pcmk.properties.resource-defaults.rule.hide-expired pcmk.properties.resource-defaults.rule.node-attr-type-number pcmk.resource.ban-move-clear pcmk.resource.ban-move-clear.bundles pcmk.resource.ban-move-clear.clear-expired pcmk.resource.ban-move-clear.clone pcmk.resource.bundle pcmk.resource.bundle.container-docker pcmk.resource.bundle.container-docker.promoted-max pcmk.resource.bundle.container-podman pcmk.resource.bundle.container-podman.promoted-max pcmk.resource.bundle.container-rkt pcmk.resource.bundle.container-rkt.promoted-max pcmk.resource.bundle.reset pcmk.resource.bundle.wait pcmk.resource.cleanup pcmk.resource.cleanup.one-resource pcmk.resource.cleanup.strict pcmk.resource.clone pcmk.resource.clone.custom-id pcmk.resource.clone.meta-in-create pcmk.resource.clone.wait pcmk.resource.config.output-formats pcmk.resource.create pcmk.resource.create.clone.custom-id pcmk.resource.create.group.future pcmk.resource.create.in-existing-bundle pcmk.resource.create.meta pcmk.resource.create.meta.future pcmk.resource.create.no-master 
pcmk.resource.create.operations pcmk.resource.create.operations.onfail-demote pcmk.resource.create.promotable pcmk.resource.create.promotable.custom-id pcmk.resource.create.wait pcmk.resource.debug pcmk.resource.delete pcmk.resource.disable.safe pcmk.resource.disable.safe.brief pcmk.resource.disable.safe.tag pcmk.resource.disable.simulate pcmk.resource.disable.simulate.brief pcmk.resource.disable.simulate.tag pcmk.resource.enable-disable pcmk.resource.enable-disable.list pcmk.resource.enable-disable.tag pcmk.resource.enable-disable.wait pcmk.resource.failcount pcmk.resource.group pcmk.resource.group.add-remove-list pcmk.resource.group.wait pcmk.resource.manage-unmanage pcmk.resource.manage-unmanage.list pcmk.resource.manage-unmanage.tag pcmk.resource.manage-unmanage.with-monitor pcmk.resource.move.autoclean pcmk.resource.move.autoclean.default pcmk.resource.promotable pcmk.resource.promotable.custom-id pcmk.resource.promotable.meta-in-create pcmk.resource.promotable.wait pcmk.resource.refresh pcmk.resource.refresh.one-resource pcmk.resource.refresh.strict pcmk.resource.relations pcmk.resource.relocate pcmk.resource.restart pcmk.resource.update pcmk.resource.update-meta pcmk.resource.update-meta.list pcmk.resource.update-meta.wait pcmk.resource.update-operations pcmk.resource.update-operations.onfail-demote pcmk.resource.update.meta pcmk.resource.update.operations pcmk.resource.update.operations.onfail-demote pcmk.resource.update.wait pcmk.resource.utilization pcmk.resource.utilization-set-list-for-resource pcmk.stonith.cleanup pcmk.stonith.cleanup.one-resource pcmk.stonith.cleanup.strict pcmk.stonith.create pcmk.stonith.create.in-group pcmk.stonith.create.meta pcmk.stonith.create.operations pcmk.stonith.create.operations.onfail-demote pcmk.stonith.create.wait pcmk.stonith.delete pcmk.stonith.enable-disable pcmk.stonith.enable-disable.list pcmk.stonith.enable-disable.wait pcmk.stonith.history.cleanup pcmk.stonith.history.show pcmk.stonith.history.update pcmk.stonith.levels pcmk.stonith.levels.add-remove-devices-list pcmk.stonith.levels.clear pcmk.stonith.levels.node-attr pcmk.stonith.levels.node-regexp pcmk.stonith.levels.verify pcmk.stonith.refresh pcmk.stonith.refresh.one-resource pcmk.stonith.refresh.strict pcmk.stonith.update pcmk.stonith.update.scsi-devices pcmk.stonith.update.scsi-devices.add-remove pcmk.stonith.update.scsi-devices.mpath pcmk.tag pcmk.tag.resources pcs.auth.client pcs.auth.client.cluster pcs.auth.client.token pcs.auth.deauth-client pcs.auth.deauth-server pcs.auth.no-bidirectional pcs.auth.separated-name-and-address pcs.auth.server.token pcs.cfg-in-file.cib pcs.daemon-ssl-cert.set pcs.daemon-ssl-cert.sync-to-local-cluster pcs.disaster-recovery.essentials pcs.reports.severity.deprecation pcs.request-timeout resource-agents.describe resource-agents.list resource-agents.list.detailed resource-agents.ocf.version-1-0 resource-agents.ocf.version-1-1 resource-agents.self-validation sbd sbd.option-timeout-action sbd.shared-block-device status.corosync.membership status.pcmk.resources.hide-inactive status.pcmk.resources.id status.pcmk.resources.node status.pcmk.resources.orphaned status.pcmk.xml stonith-agents.describe stonith-agents.list stonith-agents.list.detailed stonith-agents.ocf.version-1-0 stonith-agents.ocf.version-1-1 stonith-agents.self-validation TASK [fedora.linux_system_roles.ha_cluster : Parse pcs capabilities] *********** task path: 
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:148 Saturday 23 March 2024 12:04:02 +0000 (0:00:01.797) 0:00:24.376 ******** ok: [sut] => { "ansible_facts": { "__ha_cluster_pcs_capabilities": [ "booth", "booth.enable-authfile.unset", "cluster.config.backup-local", "cluster.config.restore-cluster", "cluster.config.restore-local", "cluster.config.uuid", "cluster.create", "cluster.create.enable", "cluster.create.local", "cluster.create.no-keys-sync", "cluster.create.separated-name-and-address", "cluster.create.start", "cluster.create.start.wait", "cluster.create.transport.knet", "cluster.create.transport.udp-udpu", "cluster.create.transport.udp-udpu.no-rrp", "cluster.destroy", "cluster.destroy.all", "cluster.report", "cluster.verify", "corosync.authkey.update", "corosync.config.get", "corosync.config.get.struct", "corosync.config.reload", "corosync.config.sync-to-local-cluster", "corosync.config.update", "corosync.link.add", "corosync.link.remove", "corosync.link.remove.list", "corosync.link.update", "corosync.qdevice", "corosync.qdevice.model.net", "corosync.quorum", "corosync.quorum.device", "corosync.quorum.device.client.model.net.certificates.local", "corosync.quorum.device.heuristics", "corosync.quorum.device.model.net", "corosync.quorum.device.model.net.options_tls_and_kaptb", "corosync.quorum.set-expected-votes-runtime", "corosync.quorum.status", "corosync.quorum.unblock", "corosync.totem.block_unlisted_ips", "corosync.uidgid", "node.add", "node.add.enable", "node.add.separated-name-and-address", "node.add.start", "node.add.start.wait", "node.attributes", "node.attributes.set-list-for-node", "node.confirm-off", "node.fence", "node.guest", "node.kill", "node.maintenance", "node.maintenance.all", "node.maintenance.list", "node.maintenance.wait", "node.remote", "node.remote.onfail-demote", "node.remove", "node.remove-from-caches", "node.remove.list", "node.standby", "node.standby.all", "node.standby.list", "node.standby.wait", "node.start-stop-enable-disable", "node.start-stop-enable-disable.all", "node.start-stop-enable-disable.list", "node.start-stop-enable-disable.start-wait", "node.utilization", "node.utilization.set-list-for-node", "pcmk.acl.enable-disable", "pcmk.acl.group", "pcmk.acl.role", "pcmk.acl.role.create-with-permissions", "pcmk.acl.role.delete-with-users-groups", "pcmk.acl.user", "pcmk.alert", "pcmk.cib.checkpoints", "pcmk.cib.checkpoints.diff", "pcmk.cib.edit", "pcmk.cib.get", "pcmk.cib.get.scope", "pcmk.cib.roles.promoted-unpromoted", "pcmk.cib.set", "pcmk.constraint.colocation.set", "pcmk.constraint.colocation.set.options", "pcmk.constraint.colocation.simple", "pcmk.constraint.colocation.simple.options", "pcmk.constraint.config.output-formats", "pcmk.constraint.hide-expired", "pcmk.constraint.location.simple", "pcmk.constraint.location.simple.options", "pcmk.constraint.location.simple.resource-regexp", "pcmk.constraint.location.simple.rule", "pcmk.constraint.location.simple.rule.node-attr-type-number", "pcmk.constraint.location.simple.rule.options", "pcmk.constraint.location.simple.rule.rule-add-remove", "pcmk.constraint.no-autocorrect", "pcmk.constraint.order.set", "pcmk.constraint.order.set.options", "pcmk.constraint.order.simple", "pcmk.constraint.order.simple.options", "pcmk.constraint.ticket.set", "pcmk.constraint.ticket.set.options", "pcmk.constraint.ticket.simple", "pcmk.constraint.ticket.simple.constraint-id", "pcmk.properties.cluster", 
"pcmk.properties.cluster.config.output-formats", "pcmk.properties.cluster.defaults", "pcmk.properties.cluster.describe", "pcmk.properties.cluster.describe.output-formats", "pcmk.properties.cluster.set_properties.iso8601duration", "pcmk.properties.operation-defaults", "pcmk.properties.operation-defaults.multiple", "pcmk.properties.operation-defaults.rule", "pcmk.properties.operation-defaults.rule-rsc-op", "pcmk.properties.operation-defaults.rule.hide-expired", "pcmk.properties.operation-defaults.rule.node-attr-type-number", "pcmk.properties.resource-defaults", "pcmk.properties.resource-defaults.multiple", "pcmk.properties.resource-defaults.rule", "pcmk.properties.resource-defaults.rule-rsc-op", "pcmk.properties.resource-defaults.rule.hide-expired", "pcmk.properties.resource-defaults.rule.node-attr-type-number", "pcmk.resource.ban-move-clear", "pcmk.resource.ban-move-clear.bundles", "pcmk.resource.ban-move-clear.clear-expired", "pcmk.resource.ban-move-clear.clone", "pcmk.resource.bundle", "pcmk.resource.bundle.container-docker", "pcmk.resource.bundle.container-docker.promoted-max", "pcmk.resource.bundle.container-podman", "pcmk.resource.bundle.container-podman.promoted-max", "pcmk.resource.bundle.container-rkt", "pcmk.resource.bundle.container-rkt.promoted-max", "pcmk.resource.bundle.reset", "pcmk.resource.bundle.wait", "pcmk.resource.cleanup", "pcmk.resource.cleanup.one-resource", "pcmk.resource.cleanup.strict", "pcmk.resource.clone", "pcmk.resource.clone.custom-id", "pcmk.resource.clone.meta-in-create", "pcmk.resource.clone.wait", "pcmk.resource.config.output-formats", "pcmk.resource.create", "pcmk.resource.create.clone.custom-id", "pcmk.resource.create.group.future", "pcmk.resource.create.in-existing-bundle", "pcmk.resource.create.meta", "pcmk.resource.create.meta.future", "pcmk.resource.create.no-master", "pcmk.resource.create.operations", "pcmk.resource.create.operations.onfail-demote", "pcmk.resource.create.promotable", "pcmk.resource.create.promotable.custom-id", "pcmk.resource.create.wait", "pcmk.resource.debug", "pcmk.resource.delete", "pcmk.resource.disable.safe", "pcmk.resource.disable.safe.brief", "pcmk.resource.disable.safe.tag", "pcmk.resource.disable.simulate", "pcmk.resource.disable.simulate.brief", "pcmk.resource.disable.simulate.tag", "pcmk.resource.enable-disable", "pcmk.resource.enable-disable.list", "pcmk.resource.enable-disable.tag", "pcmk.resource.enable-disable.wait", "pcmk.resource.failcount", "pcmk.resource.group", "pcmk.resource.group.add-remove-list", "pcmk.resource.group.wait", "pcmk.resource.manage-unmanage", "pcmk.resource.manage-unmanage.list", "pcmk.resource.manage-unmanage.tag", "pcmk.resource.manage-unmanage.with-monitor", "pcmk.resource.move.autoclean", "pcmk.resource.move.autoclean.default", "pcmk.resource.promotable", "pcmk.resource.promotable.custom-id", "pcmk.resource.promotable.meta-in-create", "pcmk.resource.promotable.wait", "pcmk.resource.refresh", "pcmk.resource.refresh.one-resource", "pcmk.resource.refresh.strict", "pcmk.resource.relations", "pcmk.resource.relocate", "pcmk.resource.restart", "pcmk.resource.update", "pcmk.resource.update-meta", "pcmk.resource.update-meta.list", "pcmk.resource.update-meta.wait", "pcmk.resource.update-operations", "pcmk.resource.update-operations.onfail-demote", "pcmk.resource.update.meta", "pcmk.resource.update.operations", "pcmk.resource.update.operations.onfail-demote", "pcmk.resource.update.wait", "pcmk.resource.utilization", "pcmk.resource.utilization-set-list-for-resource", "pcmk.stonith.cleanup", 
"pcmk.stonith.cleanup.one-resource", "pcmk.stonith.cleanup.strict", "pcmk.stonith.create", "pcmk.stonith.create.in-group", "pcmk.stonith.create.meta", "pcmk.stonith.create.operations", "pcmk.stonith.create.operations.onfail-demote", "pcmk.stonith.create.wait", "pcmk.stonith.delete", "pcmk.stonith.enable-disable", "pcmk.stonith.enable-disable.list", "pcmk.stonith.enable-disable.wait", "pcmk.stonith.history.cleanup", "pcmk.stonith.history.show", "pcmk.stonith.history.update", "pcmk.stonith.levels", "pcmk.stonith.levels.add-remove-devices-list", "pcmk.stonith.levels.clear", "pcmk.stonith.levels.node-attr", "pcmk.stonith.levels.node-regexp", "pcmk.stonith.levels.verify", "pcmk.stonith.refresh", "pcmk.stonith.refresh.one-resource", "pcmk.stonith.refresh.strict", "pcmk.stonith.update", "pcmk.stonith.update.scsi-devices", "pcmk.stonith.update.scsi-devices.add-remove", "pcmk.stonith.update.scsi-devices.mpath", "pcmk.tag", "pcmk.tag.resources", "pcs.auth.client", "pcs.auth.client.cluster", "pcs.auth.client.token", "pcs.auth.deauth-client", "pcs.auth.deauth-server", "pcs.auth.no-bidirectional", "pcs.auth.separated-name-and-address", "pcs.auth.server.token", "pcs.cfg-in-file.cib", "pcs.daemon-ssl-cert.set", "pcs.daemon-ssl-cert.sync-to-local-cluster", "pcs.disaster-recovery.essentials", "pcs.reports.severity.deprecation", "pcs.request-timeout", "resource-agents.describe", "resource-agents.list", "resource-agents.list.detailed", "resource-agents.ocf.version-1-0", "resource-agents.ocf.version-1-1", "resource-agents.self-validation", "sbd", "sbd.option-timeout-action", "sbd.shared-block-device", "status.corosync.membership", "status.pcmk.resources.hide-inactive", "status.pcmk.resources.id", "status.pcmk.resources.node", "status.pcmk.resources.orphaned", "status.pcmk.xml", "stonith-agents.describe", "stonith-agents.list", "stonith-agents.list.detailed", "stonith-agents.ocf.version-1-0", "stonith-agents.ocf.version-1-1", "stonith-agents.self-validation" ], "__ha_cluster_pcsd_capabilities_available": true }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fetch pcsd capabilities] ********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:155 Saturday 23 March 2024 12:04:02 +0000 (0:00:00.018) 0:00:24.394 ******** ok: [sut] => { "changed": false, "cmd": [ "pcsd", "--version", "--full" ], "delta": "0:00:01.116126", "end": "2024-03-23 12:04:03.796815", "rc": 0, "start": "2024-03-23 12:04:02.680689" } STDOUT: 0.11.7 booth.get-config booth.set-config booth.set-config.multiple cluster.config.restore-local cluster.config.uuid cluster.create cluster.create.enable cluster.create.no-keys-sync cluster.create.separated-name-and-address cluster.create.start cluster.create.start.wait cluster.create.transport.knet cluster.create.transport.udp-udpu cluster.create.transport.udp-udpu.no-rrp cluster.destroy cluster.destroy.all corosync.config.get corosync.config.set corosync.qdevice.model.net.certificates corosync.quorum.device.client corosync.quorum.device.client.model.net.certificates corosync.quorum.device.client.model.net.certificates.local corosync.quorum.device.client.model.net.certificates.rest-api.v2 corosync.quorum.status corosync.totem.block_unlisted_ips node.add node.add.enable node.add.list node.add.separated-name-and-address node.add.start node.add.start.wait node.attributes node.maintenance node.maintenance.all node.maintenance.list node.maintenance.wait node.remove 
node.remove-from-caches node.remove.list node.standby node.standby.all node.standby.list node.standby.wait node.start-stop-enable-disable node.start-stop-enable-disable.all node.start-stop-enable-disable.stop-component node.utilization pcmk.acl.group pcmk.acl.role pcmk.acl.role.create-with-permissions pcmk.acl.role.delete-with-users-groups pcmk.acl.role.delete-with-users-groups-implicit pcmk.acl.user pcmk.alert pcmk.cib.get pcmk.cib.remove_elements.constraints pcmk.cib.remove_elements.rest-api.v2 pcmk.cib.roles.promoted-unpromoted pcmk.constraint.colocation.set pcmk.constraint.colocation.set.options pcmk.constraint.colocation.simple pcmk.constraint.config.output-formats pcmk.constraint.location.simple pcmk.constraint.location.simple.rule pcmk.constraint.location.simple.rule.node-attr-type-number pcmk.constraint.no-autocorrect pcmk.constraint.order.set pcmk.constraint.order.set.options pcmk.constraint.order.simple pcmk.constraint.ticket.set pcmk.constraint.ticket.set.options pcmk.constraint.ticket.simple pcmk.constraint.ticket.simple.constraint-id pcmk.properties.cluster pcmk.properties.cluster.describe pcmk.properties.cluster.describe.rest-api.v2 pcmk.properties.cluster.get_properties.rest-api.v2 pcmk.properties.cluster.set_properties.iso8601duration pcmk.properties.cluster.set_properties.rest-api.v2 pcmk.resource.ban-move-clear pcmk.resource.ban-move-clear.bundles pcmk.resource.ban-move-clear.clear-expired pcmk.resource.ban-move-clear.clone pcmk.resource.cleanup.one-resource pcmk.resource.cleanup.strict pcmk.resource.clone pcmk.resource.create pcmk.resource.create.clone.custom-id pcmk.resource.create.meta pcmk.resource.create.no-master pcmk.resource.create.operations pcmk.resource.create.operations.onfail-demote pcmk.resource.create.promotable pcmk.resource.create.promotable.custom-id pcmk.resource.create.wait pcmk.resource.delete pcmk.resource.delete.list pcmk.resource.disable.safe pcmk.resource.disable.simulate pcmk.resource.disable.simulate.brief pcmk.resource.disable.simulate.tag pcmk.resource.enable-disable pcmk.resource.enable-disable.list pcmk.resource.enable-disable.wait pcmk.resource.group pcmk.resource.manage-unmanage pcmk.resource.manage-unmanage.list pcmk.resource.manage-unmanage.tag pcmk.resource.manage-unmanage.with-monitor pcmk.resource.move.autoclean pcmk.resource.promotable pcmk.resource.refresh.one-resource pcmk.resource.refresh.strict pcmk.resource.update pcmk.resource.update-meta pcmk.resource.utilization pcmk.stonith.cleanup.one-resource pcmk.stonith.cleanup.strict pcmk.stonith.create pcmk.stonith.create.in-group pcmk.stonith.create.meta pcmk.stonith.create.operations pcmk.stonith.create.operations.onfail-demote pcmk.stonith.create.wait pcmk.stonith.delete pcmk.stonith.delete.list pcmk.stonith.enable-disable pcmk.stonith.enable-disable.list pcmk.stonith.enable-disable.wait pcmk.stonith.levels pcmk.stonith.levels.add-remove-devices-list pcmk.stonith.levels.clear pcmk.stonith.levels.node-attr pcmk.stonith.levels.node-regexp pcmk.stonith.levels.verify pcmk.stonith.refresh.one-resource pcmk.stonith.refresh.strict pcmk.stonith.scsi-unfence-node-mpath pcmk.stonith.scsi-unfence-node-v2 pcmk.stonith.update pcs.auth.export-cluster-known-hosts pcs.auth.known-host-change pcs.auth.no-bidirectional pcs.auth.separated-name-and-address pcs.auth.server pcs.automatic-pcs-configs-sync pcs.daemon-ssl-cert.set pcs.permissions pcs.rest-api.v1.1 pcs.rest-api.v2 resource-agents.describe resource-agents.list resource-agents.list.detailed resource-agents.ocf.version-1-0 
resource-agents.ocf.version-1-1 resource-agents.self-validation resource-agents.simplified-query-api sbd sbd-node sbd-node.shared-block-device sbd.option-timeout-action status.pcmk.local-node stonith-agents.describe stonith-agents.list stonith-agents.list.detailed stonith-agents.ocf.version-1-0 stonith-agents.ocf.version-1-1 stonith-agents.self-validation TASK [fedora.linux_system_roles.ha_cluster : Parse pcsd capabilities] ********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:163 Saturday 23 March 2024 12:04:03 +0000 (0:00:01.459) 0:00:25.854 ******** ok: [sut] => { "ansible_facts": { "__ha_cluster_pcsd_capabilities": [ "booth.get-config", "booth.set-config", "booth.set-config.multiple", "cluster.config.restore-local", "cluster.config.uuid", "cluster.create", "cluster.create.enable", "cluster.create.no-keys-sync", "cluster.create.separated-name-and-address", "cluster.create.start", "cluster.create.start.wait", "cluster.create.transport.knet", "cluster.create.transport.udp-udpu", "cluster.create.transport.udp-udpu.no-rrp", "cluster.destroy", "cluster.destroy.all", "corosync.config.get", "corosync.config.set", "corosync.qdevice.model.net.certificates", "corosync.quorum.device.client", "corosync.quorum.device.client.model.net.certificates", "corosync.quorum.device.client.model.net.certificates.local", "corosync.quorum.device.client.model.net.certificates.rest-api.v2", "corosync.quorum.status", "corosync.totem.block_unlisted_ips", "node.add", "node.add.enable", "node.add.list", "node.add.separated-name-and-address", "node.add.start", "node.add.start.wait", "node.attributes", "node.maintenance", "node.maintenance.all", "node.maintenance.list", "node.maintenance.wait", "node.remove", "node.remove-from-caches", "node.remove.list", "node.standby", "node.standby.all", "node.standby.list", "node.standby.wait", "node.start-stop-enable-disable", "node.start-stop-enable-disable.all", "node.start-stop-enable-disable.stop-component", "node.utilization", "pcmk.acl.group", "pcmk.acl.role", "pcmk.acl.role.create-with-permissions", "pcmk.acl.role.delete-with-users-groups", "pcmk.acl.role.delete-with-users-groups-implicit", "pcmk.acl.user", "pcmk.alert", "pcmk.cib.get", "pcmk.cib.remove_elements.constraints", "pcmk.cib.remove_elements.rest-api.v2", "pcmk.cib.roles.promoted-unpromoted", "pcmk.constraint.colocation.set", "pcmk.constraint.colocation.set.options", "pcmk.constraint.colocation.simple", "pcmk.constraint.config.output-formats", "pcmk.constraint.location.simple", "pcmk.constraint.location.simple.rule", "pcmk.constraint.location.simple.rule.node-attr-type-number", "pcmk.constraint.no-autocorrect", "pcmk.constraint.order.set", "pcmk.constraint.order.set.options", "pcmk.constraint.order.simple", "pcmk.constraint.ticket.set", "pcmk.constraint.ticket.set.options", "pcmk.constraint.ticket.simple", "pcmk.constraint.ticket.simple.constraint-id", "pcmk.properties.cluster", "pcmk.properties.cluster.describe", "pcmk.properties.cluster.describe.rest-api.v2", "pcmk.properties.cluster.get_properties.rest-api.v2", "pcmk.properties.cluster.set_properties.iso8601duration", "pcmk.properties.cluster.set_properties.rest-api.v2", "pcmk.resource.ban-move-clear", "pcmk.resource.ban-move-clear.bundles", "pcmk.resource.ban-move-clear.clear-expired", "pcmk.resource.ban-move-clear.clone", "pcmk.resource.cleanup.one-resource", "pcmk.resource.cleanup.strict", "pcmk.resource.clone", 
"pcmk.resource.create", "pcmk.resource.create.clone.custom-id", "pcmk.resource.create.meta", "pcmk.resource.create.no-master", "pcmk.resource.create.operations", "pcmk.resource.create.operations.onfail-demote", "pcmk.resource.create.promotable", "pcmk.resource.create.promotable.custom-id", "pcmk.resource.create.wait", "pcmk.resource.delete", "pcmk.resource.delete.list", "pcmk.resource.disable.safe", "pcmk.resource.disable.simulate", "pcmk.resource.disable.simulate.brief", "pcmk.resource.disable.simulate.tag", "pcmk.resource.enable-disable", "pcmk.resource.enable-disable.list", "pcmk.resource.enable-disable.wait", "pcmk.resource.group", "pcmk.resource.manage-unmanage", "pcmk.resource.manage-unmanage.list", "pcmk.resource.manage-unmanage.tag", "pcmk.resource.manage-unmanage.with-monitor", "pcmk.resource.move.autoclean", "pcmk.resource.promotable", "pcmk.resource.refresh.one-resource", "pcmk.resource.refresh.strict", "pcmk.resource.update", "pcmk.resource.update-meta", "pcmk.resource.utilization", "pcmk.stonith.cleanup.one-resource", "pcmk.stonith.cleanup.strict", "pcmk.stonith.create", "pcmk.stonith.create.in-group", "pcmk.stonith.create.meta", "pcmk.stonith.create.operations", "pcmk.stonith.create.operations.onfail-demote", "pcmk.stonith.create.wait", "pcmk.stonith.delete", "pcmk.stonith.delete.list", "pcmk.stonith.enable-disable", "pcmk.stonith.enable-disable.list", "pcmk.stonith.enable-disable.wait", "pcmk.stonith.levels", "pcmk.stonith.levels.add-remove-devices-list", "pcmk.stonith.levels.clear", "pcmk.stonith.levels.node-attr", "pcmk.stonith.levels.node-regexp", "pcmk.stonith.levels.verify", "pcmk.stonith.refresh.one-resource", "pcmk.stonith.refresh.strict", "pcmk.stonith.scsi-unfence-node-mpath", "pcmk.stonith.scsi-unfence-node-v2", "pcmk.stonith.update", "pcs.auth.export-cluster-known-hosts", "pcs.auth.known-host-change", "pcs.auth.no-bidirectional", "pcs.auth.separated-name-and-address", "pcs.auth.server", "pcs.automatic-pcs-configs-sync", "pcs.daemon-ssl-cert.set", "pcs.permissions", "pcs.rest-api.v1.1", "pcs.rest-api.v2", "resource-agents.describe", "resource-agents.list", "resource-agents.list.detailed", "resource-agents.ocf.version-1-0", "resource-agents.ocf.version-1-1", "resource-agents.self-validation", "resource-agents.simplified-query-api", "sbd", "sbd-node", "sbd-node.shared-block-device", "sbd.option-timeout-action", "status.pcmk.local-node", "stonith-agents.describe", "stonith-agents.list", "stonith-agents.list.detailed", "stonith-agents.ocf.version-1-0", "stonith-agents.ocf.version-1-1", "stonith-agents.self-validation" ] }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fail if pcs is to old to configure resources and operations defaults] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:172 Saturday 23 March 2024 12:04:03 +0000 (0:00:00.023) 0:00:25.877 ******** skipping: [sut] => { "changed": false, "false_condition": "( ha_cluster_resource_defaults and not 'pcmk.properties.resource-defaults.multiple' in __ha_cluster_pcs_capabilities ) or ( ha_cluster_resource_operation_defaults and not 'pcmk.properties.operation-defaults.multiple' in __ha_cluster_pcs_capabilities )", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set hacluster password] *********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:22 
Saturday 23 March 2024 12:04:03 +0000 (0:00:00.009) 0:00:25.887 ******** changed: [sut] => { "append": false, "changed": true, "comment": "cluster user", "group": 189, "home": "/var/lib/pacemaker", "move_home": false, "name": "hacluster", "password": "NOT_LOGGING_PASSWORD", "shell": "/sbin/nologin", "state": "present", "uid": 189 } TASK [fedora.linux_system_roles.ha_cluster : Configure shell] ****************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:29 Saturday 23 March 2024 12:04:04 +0000 (0:00:00.579) 0:00:26.467 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Stop pcsd] ************************ task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:6 Saturday 23 March 2024 12:04:04 +0000 (0:00:00.028) 0:00:26.495 ******** ok: [sut] => { "changed": false, "name": "pcsd", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "pcsd-ruby.service basic.target system.slice network-online.target systemd-journald.socket sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "\"man:pcsd(8)\" \"man:pcs(8)\"", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; 
ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14772", "LimitNPROCSoft": "14772", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14772", "LimitSIGPENDINGSoft": "14772", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "pcsd-ruby.service 
system.slice network-online.target sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4431", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.ha_cluster : Regenerate pcsd TLS certificate and key] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:11 Saturday 23 March 2024 12:04:05 +0000 (0:00:00.666) 0:00:27.161 ******** skipping: [sut] => (item=/var/lib/pcsd/pcsd.key) => { "ansible_loop_var": "item", "changed": false, "false_condition": "ha_cluster_regenerate_keys", "item": "/var/lib/pcsd/pcsd.key", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/var/lib/pcsd/pcsd.crt) => { "ansible_loop_var": "item", "changed": false, "false_condition": "ha_cluster_regenerate_keys", "item": "/var/lib/pcsd/pcsd.crt", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Get the stat of /var/lib/pcsd] **** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:25 Saturday 23 March 2024 12:04:05 +0000 (0:00:00.010) 0:00:27.172 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Allow certmonger to write into pcsd's certificate directory] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:30 Saturday 23 March 2024 12:04:05 +0000 (0:00:00.016) 0:00:27.189 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [Ensure the name of ha_cluster_pcsd_certificates is 
/var/lib/pcsd/pcsd; Create certificates using the certificate role] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:37 Saturday 23 March 2024 12:04:05 +0000 (0:00:00.017) 0:00:27.206 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Set pcsd's certificate directory back to cluster_var_lib_t] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:49 Saturday 23 March 2024 12:04:05 +0000 (0:00:00.019) 0:00:27.226 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS private key] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:64 Saturday 23 March 2024 12:04:05 +0000 (0:00:00.019) 0:00:27.245 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is string", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS certificate] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:71 Saturday 23 March 2024 12:04:05 +0000 (0:00:00.019) 0:00:27.265 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is string", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute pcs_settings.conf] ***** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:79 Saturday 23 March 2024 12:04:05 +0000 (0:00:00.019) 0:00:27.285 ******** changed: [sut] => { "changed": true, "checksum": "9bbea634a798cf0976b80cd3c4e34aca6a6a7d44", "dest": "/var/lib/pcsd/pcs_settings.conf", "gid": 0, "group": "root", "md5sum": "4b74001d21d3867563d0c773bde32b42", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:cluster_var_lib_t:s0", "size": 361, "src": "/root/.ansible/tmp/ansible-tmp-1711195445.296222-35189-61572008101677/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88 Saturday 23 March 2024 12:04:05 +0000 (0:00:00.696) 0:00:27.981 ******** changed: [sut] => { "changed": true, "enabled": true, "name": "pcsd", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice sysinit.target network-online.target pcsd-ruby.service systemd-journald.socket basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", 
"CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "\"man:pcsd(8)\" \"man:pcs(8)\"", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": 
"524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14772", "LimitNPROCSoft": "14772", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14772", "LimitSIGPENDINGSoft": "14772", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target network-online.target pcsd-ruby.service", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4431", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK 
[fedora.linux_system_roles.ha_cluster : Configure firewall] *************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:35 Saturday 23 March 2024 12:04:12 +0000 (0:00:07.003) 0:00:34.985 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/firewall.yml for sut TASK [Ensure the service and the ports status with the firewall role] ********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/firewall.yml:3 Saturday 23 March 2024 12:04:12 +0000 (0:00:00.025) 0:00:35.010 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure selinux] **************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:38 Saturday 23 March 2024 12:04:13 +0000 (0:00:00.024) 0:00:35.034 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Populate service facts] *********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:3 Saturday 23 March 2024 12:04:13 +0000 (0:00:00.026) 0:00:35.061 ******** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "corosync-notifyd.service": { "name": "corosync-notifyd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qnetd.service": { "name": "corosync-qnetd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync.service": { "name": "corosync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crm_mon.service": { "name": "crm_mon.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": 
"dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pacemaker.service": { "name": "pacemaker.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "pcsd-ruby.service": { "name": "pcsd-ruby.service", "source": "systemd", "state": "running", "status": "disabled" }, "pcsd.service": { "name": "pcsd.service", "source": "systemd", "state": "running", "status": "enabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": 
"rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": 
"running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { 
"name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { 
"name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Set the fence-virt/fence-agents port to _ha_cluster_selinux] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:12 Saturday 23 March 2024 12:04:15 +0000 (0:00:02.666) 0:00:37.728 ******** skipping: [sut] => { "changed": false, "false_condition": "ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Get associated selinux ports] ***** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:20 Saturday 23 March 2024 12:04:15 +0000 (0:00:00.024) 0:00:37.752 ******** skipping: [sut] => { "changed": false, "false_condition": "ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Add the high-availability service ports to _ha_cluster_selinux] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:28 Saturday 23 March 2024 12:04:15 +0000 (0:00:00.024) 0:00:37.777 ******** skipping: [sut] => { "changed": false, "false_condition": "ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [Ensure the service and the ports status with the selinux role] *********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:35 Saturday 23 March 2024 12:04:15 +0000 (0:00:00.023) 0:00:37.800 ******** skipping: [sut] => { "changed": false, "false_condition": "ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Install cluster packages] ********* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:44 Saturday 23 March 2024 12:04:15 +0000 (0:00:00.025) 0:00:37.826 ******** changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: fence-agents-heuristics-ping-4.13.1-1.fc38.noarch", "Installed: fence-agents-hpblade-4.13.1-1.fc38.noarch", "Installed: fence-agents-ibm-powervs-4.13.1-1.fc38.noarch", "Installed: fence-agents-ibm-vpc-4.13.1-1.fc38.noarch", "Installed: fence-agents-ibmblade-4.13.1-1.fc38.noarch", "Installed: fence-agents-ibmz-4.13.1-1.fc38.noarch", "Installed: fence-agents-ifmib-4.13.1-1.fc38.noarch", "Installed: fence-agents-ilo-moonshot-4.13.1-1.fc38.noarch", "Installed: fence-agents-ilo-mp-4.13.1-1.fc38.noarch", "Installed: 
fence-agents-ilo-ssh-4.13.1-1.fc38.noarch", "Installed: fence-agents-ilo2-4.13.1-1.fc38.noarch", "Installed: libgrss-0.7.0-16.fc38.x86_64", "Installed: libsbc-2.0-2.fc38.x86_64", "Installed: fence-agents-intelmodular-4.13.1-1.fc38.noarch", "Installed: fence-agents-ipdu-4.13.1-1.fc38.noarch", "Installed: fence-agents-ipmilan-4.13.1-1.fc38.noarch", "Installed: fence-agents-kdump-4.13.1-1.fc38.x86_64", "Installed: fence-agents-ldom-4.13.1-1.fc38.noarch", "Installed: fence-agents-lpar-4.13.1-1.fc38.noarch", "Installed: fence-agents-mpath-4.13.1-1.fc38.noarch", "Installed: fence-agents-netio-4.13.1-1.fc38.noarch", "Installed: fence-agents-openstack-4.13.1-1.fc38.noarch", "Installed: fence-agents-ironic-4.13.1-1.fc38.noarch", "Installed: sbd-1.5.1-2.fc38.2.x86_64", "Installed: fence-agents-ovh-4.13.1-1.fc38.noarch", "Installed: fence-agents-redfish-4.13.1-1.fc38.x86_64", "Installed: fence-agents-rhevm-4.13.1-1.fc38.noarch", "Installed: fence-agents-rsa-4.13.1-1.fc38.noarch", "Installed: fence-agents-rsb-4.13.1-1.fc38.noarch", "Installed: fence-agents-sanbox2-4.13.1-1.fc38.noarch", "Installed: fence-agents-sbd-4.13.1-1.fc38.noarch", "Installed: fence-agents-scsi-4.13.1-1.fc38.noarch", "Installed: fence-agents-vbox-4.13.1-1.fc38.noarch", "Installed: telnet-1:0.17-88.fc38.x86_64", "Installed: fence-agents-vmware-4.13.1-1.fc38.noarch", "Installed: fence-agents-vmware-rest-4.13.1-1.fc38.noarch", "Installed: fence-agents-vmware-soap-4.13.1-1.fc38.noarch", "Installed: fence-agents-vmware-vcloud-4.13.1-1.fc38.noarch", "Installed: fence-agents-wti-4.13.1-1.fc38.noarch", "Installed: fence-agents-xenapi-4.13.1-1.fc38.noarch", "Installed: fence-agents-zvm-4.13.1-1.fc38.noarch", "Installed: fdk-aac-free-2.0.0-10.fc38.x86_64", "Installed: fence-virt-4.13.1-1.fc38.x86_64", "Installed: perl-Net-SMTP-SSL-1.04-20.fc38.noarch", "Installed: at-spi2-atk-2.48.4-1.fc38.x86_64", "Installed: at-spi2-core-2.48.4-1.fc38.x86_64", "Installed: atk-2.48.4-1.fc38.x86_64", "Installed: python3-msal-extensions-1.0.0-4.fc38.noarch", "Installed: python3-msgpack-1.0.4-4.fc38.x86_64", "Installed: python3-msrest-0.7.1-2.20221014git2d8fd04.fc38.noarch", "Installed: python3-msrestazure-0.6.4-15.fc38.noarch", "Installed: dconf-0.40.0-8.fc38.x86_64", "Installed: libgxps-0.3.2-5.fc38.x86_64", "Installed: mailcap-2.1.53-5.fc38.noarch", "Installed: amtterm-1.6-17.fc38.x86_64", "Installed: flac-libs-1.4.3-1.fc38.x86_64", "Installed: python3-netaddr-0.8.0-11.fc38.noarch", "Installed: lcms2-2.15-1.fc38.x86_64", "Installed: python3-adal-1.2.7-10.fc38.noarch", "Installed: libnotify-0.8.3-1.fc38.x86_64", "Installed: avahi-glib-0.8-22.fc38.x86_64", "Installed: net-snmp-utils-1:5.9.4-1.fc38.x86_64", "Installed: python3-novaclient-1:18.1.0-2.fc38.noarch", "Installed: libicu-72.1-2.fc38.x86_64", "Installed: fontconfig-2.14.2-2.fc38.x86_64", "Installed: libsndfile-1.1.0-6.fc38.x86_64", "Installed: gnutls-dane-3.8.3-1.fc38.x86_64", "Installed: libsoup-2.74.3-2.fc38.x86_64", "Installed: gnutls-utils-3.8.3-1.fc38.x86_64", "Installed: libosinfo-1.11.0-1.fc38.x86_64", "Installed: desktop-file-utils-0.26-8.fc38.x86_64", "Installed: device-mapper-multipath-0.9.4-2.fc38.x86_64", "Installed: device-mapper-multipath-libs-0.9.4-2.fc38.x86_64", "Installed: perl-Class-Inspector-1.36-13.fc38.noarch", "Installed: fuse-common-3.14.1-1.fc38.x86_64", "Installed: fuse3-3.14.1-1.fc38.x86_64", "Installed: fuse3-libs-3.14.1-1.fc38.x86_64", "Installed: libXau-1.0.11-2.fc38.x86_64", "Installed: perl-Clone-0.46-2.fc38.x86_64", "Installed: 
python3-azure-core-2:1.28.0-2.fc38.noarch", "Installed: perl-PerlIO-utf8_strict-0.010-2.fc38.x86_64", "Installed: cdparanoia-libs-10.2-41.fc38.x86_64", "Installed: python3-azure-mgmt-compute-1:30.3.0-2.fc38.noarch", "Installed: libXcomposite-0.4.5-9.fc38.x86_64", "Installed: libiptcdata-1.0.5-13.fc38.x86_64", "Installed: python3-azure-common-1:1.1.28-2.fc38.noarch", "Installed: libXcursor-1.2.1-3.fc38.x86_64", "Installed: perl-Compress-Raw-Bzip2-2.204-2.fc38.x86_64", "Installed: libXdamage-1.1.5-9.fc38.x86_64", "Installed: perl-Compress-Raw-Zlib-2.204-2.fc38.x86_64", "Installed: python3-azure-identity-1:1.10.0-4.fc38.noarch", "Installed: libXext-1.3.5-2.fc38.x86_64", "Installed: libXfixes-6.0.0-5.fc38.x86_64", "Installed: libstemmer-2.2.0-5.fc38.x86_64", "Installed: harfbuzz-7.1.0-1.fc38.x86_64", "Installed: python3-boto3-1.28.21-1.fc38.noarch", "Installed: python3-azure-mgmt-core-1:1.3.2-2.fc38.noarch", "Installed: python3-botocore-1.31.21-1.fc38.noarch", "Installed: libXinerama-1.1.5-2.fc38.x86_64", "Installed: python3-os-service-types-1.7.0-12.fc38.noarch", "Installed: libXrandr-1.5.2-10.fc38.x86_64", "Installed: perl-Convert-BinHex-1.125-23.fc38.noarch", "Installed: python3-oslo-i18n-5.1.0-4.fc38.noarch", "Installed: libXrender-0.9.11-2.fc38.x86_64", "Installed: mpg123-libs-1.31.3-1.fc38.x86_64", "Installed: python3-azure-mgmt-network-21.0.1-2.fc38.noarch", "Installed: python3-oslo-serialization-5.0.0-2.fc38.noarch", "Installed: python3-oslo-utils-6.0.1-2.fc38.noarch", "Installed: libXtst-1.2.4-2.fc38.x86_64", "Installed: sg3_utils-1.46-5.fc38.x86_64", "Installed: libXv-1.0.11-18.fc38.x86_64", "Installed: libjpeg-turbo-2.1.4-2.fc38.x86_64", "Installed: sg3_utils-libs-1.46-5.fc38.x86_64", "Installed: python3-packaging-23.0-1.fc38.noarch", "Installed: libXxf86vm-1.1.5-2.fc38.x86_64", "Installed: libthai-0.1.29-4.fc38.x86_64", "Installed: pixman-0.42.2-1.fc38.x86_64", "Installed: libtheora-1:1.1.1-33.fc38.x86_64", "Installed: bluez-libs-5.72-1.fc38.x86_64", "Installed: python3-pbr-5.10.0-2.fc38.noarch", "Installed: perl-SOAP-Lite-1.27-17.fc38.noarch", "Installed: python3-pexpect-4.8.0-14.fc38.noarch", "Installed: hicolor-icon-theme-0.17-15.fc38.noarch", "Installed: perl-Data-Dump-1.25-6.fc38.noarch", "Installed: python3-cachetools-5.3.0-1.fc38.noarch", "Installed: python3-portalocker-2.7.0-1.fc38.noarch", "Installed: webrtc-audio-processing-0.3.1-10.fc38.x86_64", "Installed: libasyncns-0.8-24.fc38.x86_64", "Installed: libldac-2.0.2.3-12.fc38.x86_64", "Installed: python3-certifi-2022.09.24-2.fc38.noarch", "Installed: python3-protobuf-3.19.6-2.fc38.x86_64", "Installed: liblerc-4.0.0-3.fc38.x86_64", "Installed: python3-ptyprocess-0.7.0-3.fc38.noarch", "Installed: libsoup3-3.4.4-1.fc38.x86_64", "Installed: python3-google-api-client-2:2.120.0-1.fc38.noarch", "Installed: python3-google-api-core-1:2.11.1-6.fc38.noarch", "Installed: python3-google-auth-1:2.28.2-1.fc38.noarch", "Installed: python3-google-auth-httplib2-0.1.1-1.fc38.noarch", "Installed: openjpeg2-2.5.2-1.fc38.x86_64", "Installed: geoclue2-2.7.0-1.fc38.x86_64", "Installed: cairo-1.17.8-4.fc38.x86_64", "Installed: libunwind-1.6.2-7.fc38.x86_64", "Installed: cairo-gobject-1.17.8-4.fc38.x86_64", "Installed: python3-googleapis-common-protos-1.63.0-1.fc38.noarch", "Installed: python3-grpcio-1.48.4-8.fc38.x86_64", "Installed: python3-grpcio-status-1.48.4-8.fc38.noarch", "Installed: libtiff-4.4.0-8.fc38.x86_64", "Installed: perl-Digest-HMAC-1.04-7.fc38.noarch", "Installed: perl-Digest-SHA-1:6.04-1.fc38.x86_64", "Installed: 
libtracker-sparql-3.5.3-2.fc38.x86_64", "Installed: freetype-2.13.0-2.fc38.x86_64", "Installed: perl-Email-Date-Format-1.008-1.fc38.noarch", "Installed: fribidi-1.0.12-3.fc38.x86_64", "Installed: python3-debtcollector-2.5.0-4.fc38.noarch", "Installed: osinfo-db-20231215-1.fc38.noarch", "Installed: perl-Encode-Locale-1.05-26.fc38.noarch", "Installed: osinfo-db-tools-1.11.0-1.fc38.x86_64", "Installed: libcanberra-0.30-31.fc38.x86_64", "Installed: libvisual-1:0.4.1-1.fc38.x86_64", "Installed: libcanberra-gtk3-0.30-31.fc38.x86_64", "Installed: libwayland-client-1.22.0-1.fc38.x86_64", "Installed: libwayland-cursor-1.22.0-1.fc38.x86_64", "Installed: libwayland-egl-1.22.0-1.fc38.x86_64", "Installed: libvorbis-1:1.3.7-7.fc38.x86_64", "Installed: libwayland-server-1.22.0-1.fc38.x86_64", "Installed: libwebp-1.3.2-2.fc38.x86_64", "Installed: python3-msal-1.24.1-2.fc38.noarch", "Installed: poppler-data-0.4.11-4.fc38.noarch", "Installed: sound-theme-freedesktop-0.8-19.fc38.noarch", "Installed: perl-File-Listing-6.15-5.fc38.noarch", "Installed: colord-libs-1.4.6-4.fc38.x86_64", "Installed: fence-agents-gce-4.13.1-1.fc38.noarch", "Installed: libwsman1-2.7.1-10.fc38.x86_64", "Installed: perl-File-Slurper-0.014-2.fc38.noarch", "Installed: libxcb-1.13.1-11.fc38.x86_64", "Installed: grpc-data-1.48.4-8.fc38.noarch", "Installed: xdg-desktop-portal-1.16.0-3.fc38.x86_64", "Installed: xdg-desktop-portal-gtk-1.14.1-2.fc38.x86_64", "Installed: gstreamer1-1.22.9-1.fc38.x86_64", "Installed: perl-Time-HiRes-4:1.9770-490.fc38.x86_64", "Installed: llvm-libs-16.0.6-3.fc38.x86_64", "Installed: openwsman-python3-2.7.1-10.fc38.x86_64", "Installed: python3-requests-oauthlib-1.3.1-4.fc38.noarch", "Installed: gstreamer1-plugins-base-1.22.9-1.fc38.x86_64", "Installed: libxshmfence-1.3-12.fc38.x86_64", "Installed: opus-1.3.1-12.fc38.x86_64", "Installed: libdatrie-0.2.13-5.fc38.x86_64", "Installed: orc-0.4.33-2.fc38.x86_64", "Installed: perl-Try-Tiny-0.31-5.fc38.noarch", "Installed: tracker-3.5.3-2.fc38.x86_64", "Installed: tracker-miners-3.5.4-1.fc38.x86_64", "Installed: libogg-2:1.3.5-5.fc38.x86_64", "Installed: gtk-update-icon-cache-3.24.41-1.fc38.x86_64", "Installed: gtk3-3.24.41-1.fc38.x86_64", "Installed: python3-pyasn1-0.5.1-1.fc38.noarch", "Installed: python3-pyasn1-modules-0.5.1-1.fc38.noarch", "Installed: totem-pl-parser-3.26.6-6.fc38.x86_64", "Installed: perl-File-Copy-2.39-498.fc38.noarch", "Installed: perl-HTML-Parser-3.81-1.fc38.x86_64", "Installed: perl-HTML-Tagset-3.20-53.fc38.noarch", "Installed: perl-HTTP-Date-6.06-1.fc38.noarch", "Installed: perl-WWW-RobotRules-6.02-36.fc38.noarch", "Installed: xml-common-0.6.3-60.fc38.noarch", "Installed: protobuf-3.19.6-2.fc38.x86_64", "Installed: perl-I18N-Langinfo-0.21-498.fc38.x86_64", "Installed: perl-HTTP-Cookies-6.10-9.fc38.noarch", "Installed: cups-libs-1:2.4.7-11.fc38.x86_64", "Installed: perl-HTTP-Message-6.44-2.fc38.noarch", "Installed: perl-HTTP-Negotiate-6.01-35.fc38.noarch", "Installed: re2-1:20220601-2.fc38.x86_64", "Installed: python3-pytz-2024.1-1.fc38.noarch", "Installed: libX11-1.8.7-1.fc38.x86_64", "Installed: libX11-common-1.8.7-1.fc38.noarch", "Installed: libX11-xcb-1.8.7-1.fc38.x86_64", "Installed: perl-Net-HTTP-6.23-1.fc38.noarch", "Installed: perl-XML-Parser-2.46-13.fc38.x86_64", "Installed: libXft-2.3.8-2.fc38.x86_64", "Installed: libXi-1.8.1-1.fc38.x86_64", "Installed: perl-IO-Compress-2.204-1.fc38.noarch", "Installed: perl-IO-Compress-Brotli-0.004001-7.fc38.x86_64", "Installed: perl-IO-HTML-1.004-10.fc38.noarch", "Installed: 
perl-IO-SessionData-1.03-27.fc38.noarch", "Installed: libpciaccess-0.16-8.fc38.x86_64", "Installed: python3-rsa-4.9-2.fc38.noarch", "Installed: python3-s3transfer-0.6.2-1.fc38.noarch", "Installed: hwdata-0.380-1.fc38.noarch", "Installed: perl-Test-1.31-498.fc38.noarch", "Installed: pulseaudio-libs-16.1-4.fc38.x86_64", "Installed: ipmitool-1.8.19-2.fc38.x86_64", "Installed: pango-1.50.14-1.fc38.x86_64", "Installed: libpng-2:1.6.37-14.fc38.x86_64", "Installed: uchardet-0.0.8-2.fc38.x86_64", "Installed: mesa-dri-drivers-23.1.9-1.fc38.x86_64", "Installed: mesa-filesystem-23.1.9-1.fc38.x86_64", "Installed: mesa-libEGL-23.1.9-1.fc38.x86_64", "Installed: xprop-1.2.5-3.fc38.x86_64", "Installed: mesa-libGL-23.1.9-1.fc38.x86_64", "Installed: perl-libwww-perl-6.68-1.fc38.noarch", "Installed: perl-subs-1.04-498.fc38.noarch", "Installed: libepoxy-1.5.10-3.fc38.x86_64", "Installed: perl-LWP-MediaTypes-6.04-14.fc38.noarch", "Installed: mesa-libgbm-23.1.9-1.fc38.x86_64", "Installed: mesa-libglapi-23.1.9-1.fc38.x86_64", "Installed: perl-LWP-Protocol-https-6.10-9.fc38.noarch", "Installed: mesa-va-drivers-23.1.9-1.fc38.x86_64", "Installed: python3-httplib2-0.20.4-8.fc38.noarch", "Installed: gdk-pixbuf2-2.42.10-2.fc38.x86_64", "Installed: python3-stevedore-4.0.2-2.fc38.noarch", "Installed: gdk-pixbuf2-modules-2.42.10-2.fc38.x86_64", "Installed: libproxy-0.4.18-6.fc38.x86_64", "Installed: libproxy-duktape-0.4.18-6.fc38.x86_64", "Installed: iso-codes-4.13.0-1.fc38.noarch", "Installed: libcloudproviders-0.3.5-1.fc38.x86_64", "Installed: python3-suds-1.1.2-3.fc38.noarch", "Installed: libexif-0.6.24-4.fc38.x86_64", "Installed: libcue-2.2.1-13.fc38.x86_64", "Installed: python3-wrapt-1.15.0-1.fc38.x86_64", "Installed: lm_sensors-libs-3.6.0-13.fc38.x86_64", "Installed: exempi-2.6.3-2.fc38.x86_64", "Installed: exiv2-0.27.6-4.fc38.x86_64", "Installed: exiv2-libs-0.27.6-4.fc38.x86_64", "Installed: vte-profile-0.72.4-1.fc38.x86_64", "Installed: vte291-0.72.4-1.fc38.x86_64", "Installed: python3-iso8601-1.1.0-2.fc38.noarch", "Installed: python3-isodate-0.6.1-6.fc38.noarch", "Installed: perl-MIME-Lite-3.033-6.fc38.noarch", "Installed: perl-MIME-Types-2.24-2.fc38.noarch", "Installed: perl-MIME-tools-5.510-3.fc38.noarch", "Installed: libdrm-2.4.120-1.fc38.x86_64", "Installed: python3-jmespath-1.0.1-2.fc38.noarch", "Installed: perl-MailTools-2.21-13.fc38.noarch", "Installed: emacs-filesystem-1:28.3-0.rc1.fc38.noarch", "Installed: low-memory-monitor-2.1-7.fc38.x86_64", "Installed: graphene-1.10.6-5.fc38.x86_64", "Installed: graphite2-1.3.14-11.fc38.x86_64", "Installed: pipewire-1.0.3-1.fc38.x86_64", "Installed: abattis-cantarell-fonts-0.301-9.fc38.noarch", "Installed: pipewire-alsa-1.0.3-1.fc38.x86_64", "Installed: upower-0.99.20-3.fc38.x86_64", "Installed: pipewire-jack-audio-connection-kit-1.0.3-1.fc38.x86_64", "Installed: pipewire-jack-audio-connection-kit-libs-1.0.3-1.fc38.x86_64", "Installed: pipewire-libs-1.0.3-1.fc38.x86_64", "Installed: python3-jwt+crypto-2.6.0-2.fc38.noarch", "Installed: python3-jwt-2.6.0-2.fc38.noarch", "Installed: python3-typing-extensions-4.5.0-1.fc38.noarch", "Installed: pipewire-pulseaudio-1.0.3-1.fc38.x86_64", "Installed: python3-keystoneauth1-5.0.0-2.fc38.noarch", "Installed: abseil-cpp-20220623.1-4.fc38.x86_64", "Installed: python3-uritemplate-4.1.1-4.fc38.noarch", "Installed: lame-libs-3.100-14.fc38.x86_64", "Installed: giflib-5.2.2-1.fc38.x86_64", "Installed: wireplumber-0.4.17-1.fc38.x86_64", "Installed: wireplumber-libs-0.4.17-1.fc38.x86_64", "Installed: libgexiv2-0.14.2-1.fc38.x86_64", 
"Installed: python-oslo-i18n-lang-5.1.0-4.fc38.noarch", "Installed: adwaita-cursor-theme-44.0-1.fc38.noarch", "Installed: python-oslo-utils-lang-6.0.1-2.fc38.noarch", "Installed: adwaita-icon-theme-44.0-1.fc38.noarch", "Installed: gsettings-desktop-schemas-44.0-1.fc38.x86_64", "Installed: glib-networking-2.76.1-1.fc38.x86_64", "Installed: adobe-source-code-pro-fonts-2.042.1.062.1.026-2.fc38.noarch", "Installed: libgsf-1.14.51-1.fc38.x86_64", "Installed: xdg-utils-1.1.3-15.fc38.noarch", "Installed: gsm-1.0.22-2.fc38.x86_64", "Installed: rtkit-0.11-58.fc38.x86_64", "Installed: alsa-lib-1.2.11-2.fc38.x86_64", "Installed: poppler-23.02.0-3.fc38.x86_64", "Installed: jbigkit-libs-2.1-25.fc38.x86_64", "Installed: poppler-glib-23.02.0-3.fc38.x86_64", "Installed: libglvnd-1:1.6.0-2.fc38.x86_64", "Installed: libglvnd-egl-1:1.6.0-2.fc38.x86_64", "Installed: libglvnd-glx-1:1.6.0-2.fc38.x86_64", "Installed: perl-NTLM-1.09-34.fc38.noarch", "Installed: fence-agents-all-4.13.1-1.fc38.x86_64", "Installed: fence-agents-alom-4.13.1-1.fc38.noarch", "Installed: fence-agents-amt-4.13.1-1.fc38.noarch", "Installed: fence-agents-amt-ws-4.13.1-1.fc38.noarch", "Installed: fence-agents-apc-4.13.1-1.fc38.noarch", "Installed: fence-agents-apc-snmp-4.13.1-1.fc38.noarch", "Installed: fence-agents-aws-4.13.1-1.fc38.noarch", "Installed: fence-agents-azure-arm-4.13.1-1.fc38.noarch", "Installed: fence-agents-bladecenter-4.13.1-1.fc38.noarch", "Installed: fence-agents-brocade-4.13.1-1.fc38.noarch", "Installed: fence-agents-cdu-4.13.1-1.fc38.noarch", "Installed: fence-agents-cisco-mds-4.13.1-1.fc38.noarch", "Installed: fence-agents-cisco-ucs-4.13.1-1.fc38.noarch", "Installed: fence-agents-common-4.13.1-1.fc38.noarch", "Installed: fence-agents-compute-4.13.1-1.fc38.noarch", "Installed: fence-agents-cyberpower-ssh-4.13.1-1.fc38.noarch", "Installed: fence-agents-docker-4.13.1-1.fc38.noarch", "Installed: fence-agents-drac-4.13.1-1.fc38.noarch", "Installed: fence-agents-drac5-4.13.1-1.fc38.noarch", "Installed: fence-agents-eaton-snmp-4.13.1-1.fc38.noarch", "Installed: fence-agents-eaton-ssh-4.13.1-1.fc38.noarch", "Installed: fence-agents-ecloud-4.13.1-1.fc38.noarch", "Installed: fence-agents-emerson-4.13.1-1.fc38.noarch", "Installed: fence-agents-eps-4.13.1-1.fc38.noarch", "Installed: liblc3-1.0.4-2.fc38.x86_64", "Installed: fence-agents-hds-cb-4.13.1-1.fc38.noarch" ] } lsrpackages: corosync fence-agents-all fence-virt libknet1-plugins-all openssl pacemaker resource-agents sbd TASK [fedora.linux_system_roles.ha_cluster : Distribute fence-virt authkey] **** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:50 Saturday 23 March 2024 12:05:16 +0000 (0:01:00.912) 0:01:38.738 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Create /etc/cluster directory] **** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:3 Saturday 23 March 2024 12:05:16 +0000 (0:00:00.028) 0:01:38.767 ******** changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/cluster", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 4096, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Get fence_xvm.key] **************** task path: 
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:9 Saturday 23 March 2024 12:05:16 +0000 (0:00:00.255) 0:01:39.023 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on the controller fence_xvm.key] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:16 Saturday 23 March 2024 12:05:17 +0000 (0:00:00.034) 0:01:39.057 ******** skipping: [sut] => { "changed": false, "false_condition": "preshared_key_src is string and preshared_key_src | length > 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from the controller fence_xvm.key] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:21 Saturday 23 March 2024 12:05:17 +0000 (0:00:00.025) 0:01:39.083 ******** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from the controller fence_xvm.key] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:29 Saturday 23 March 2024 12:05:17 +0000 (0:00:00.024) 0:01:39.107 ******** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Generate key using OpenSSL fence_xvm.key] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:44 Saturday 23 March 2024 12:05:17 +0000 (0:00:00.025) 0:01:39.133 ******** ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fetch generated fence_xvm.key] **** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:50 Saturday 23 March 2024 12:05:17 +0000 (0:00:00.349) 0:01:39.482 ******** ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on cluster nodes fence_xvm.key] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:60 Saturday 23 March 2024 12:05:17 +0000 (0:00:00.027) 0:01:39.509 ******** ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from cluster nodes fence_xvm.key] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:65 Saturday 23 March 2024 12:05:17 +0000 (0:00:00.247) 0:01:39.757 ******** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK 
[fedora.linux_system_roles.ha_cluster : Use the slurped key from cluster nodes fence_xvm.key] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:73 Saturday 23 March 2024 12:05:17 +0000 (0:00:00.024) 0:01:39.781 ******** skipping: [sut] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Send fence_xvm.key to nodes] ****** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:17 Saturday 23 March 2024 12:05:17 +0000 (0:00:00.030) 0:01:39.812 ******** changed: [sut] => { "changed": true, "checksum": "d807a847722f5ef532f72de71c8adbb0226ed6a7", "dest": "/etc/cluster/fence_xvm.key", "gid": 0, "group": "root", "md5sum": "258ad68bea6c466546974ff657dcb0a5", "mode": "0600", "owner": "root", "secontext": "system_u:object_r:cluster_conf_t:s0", "size": 512, "src": "/root/.ansible/tmp/ansible-tmp-1711195517.8340268-35474-71357072247811/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Configure SBD] ******************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:55 Saturday 23 March 2024 12:05:18 +0000 (0:00:00.619) 0:01:40.432 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Configure watchdog kernel module blocklist] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:10 Saturday 23 March 2024 12:05:18 +0000 (0:00:00.077) 0:01:40.509 ******** changed: [sut] => (item=iTCO_wdt) => { "ansible_loop_var": "item", "backup": "", "changed": true, "item": "iTCO_wdt" } MSG: line added TASK [fedora.linux_system_roles.ha_cluster : Unload watchdog kernel modules from blocklist] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:22 Saturday 23 March 2024 12:05:18 +0000 (0:00:00.315) 0:01:40.824 ******** ok: [sut] => (item=iTCO_wdt) => { "ansible_loop_var": "item", "changed": false, "item": "iTCO_wdt", "name": "iTCO_wdt", "params": "", "state": "absent" } TASK [fedora.linux_system_roles.ha_cluster : Configure watchdog kernel modules] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:30 Saturday 23 March 2024 12:05:19 +0000 (0:00:00.306) 0:01:41.131 ******** changed: [sut] => (item=softdog) => { "ansible_loop_var": "item", "backup": "", "changed": true, "item": "softdog" } MSG: line added TASK [fedora.linux_system_roles.ha_cluster : Load watchdog kernel modules] ***** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:40 Saturday 23 March 2024 12:05:19 +0000 (0:00:00.253) 0:01:41.384 ******** changed: [sut] => (item=softdog) => { "ansible_loop_var": "item", "changed": true, "item": "softdog", "name": "softdog", 
"params": "", "state": "present" } TASK [fedora.linux_system_roles.ha_cluster : Probe SBD devices] **************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:57 Saturday 23 March 2024 12:05:19 +0000 (0:00:00.330) 0:01:41.714 ******** ok: [sut] => (item=/dev/loop0) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "sbd", "-d", "/dev/loop0", "dump" ], "delta": "0:00:00.022814", "end": "2024-03-23 12:05:19.928046", "failed_when_result": false, "item": "/dev/loop0", "rc": 1, "start": "2024-03-23 12:05:19.905232" } STDOUT: ==Dumping header on disk /dev/loop0 STDERR: ==Header on disk /dev/loop0 NOT dumped sbd failed; please check the logs. MSG: non-zero return code TASK [fedora.linux_system_roles.ha_cluster : Initialize SBD devices] *********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:70 Saturday 23 March 2024 12:05:19 +0000 (0:00:00.268) 0:01:41.983 ******** changed: [sut] => (item={'changed': False, 'stdout': '==Dumping header on disk /dev/loop0', 'stderr': '==Header on disk /dev/loop0 NOT dumped\nsbd failed; please check the logs.', 'rc': 1, 'cmd': ['sbd', '-d', '/dev/loop0', 'dump'], 'start': '2024-03-23 12:05:19.905232', 'end': '2024-03-23 12:05:19.928046', 'delta': '0:00:00.022814', 'failed': False, 'msg': 'non-zero return code', 'invocation': {'module_args': {'_raw_params': 'sbd -d /dev/loop0 dump', '_uses_shell': False, 'expand_argument_vars': True, 'stdin_add_newline': True, 'strip_empty_ends': True, 'argv': None, 'chdir': None, 'executable': None, 'creates': None, 'removes': None, 'stdin': None}}, 'stdout_lines': ['==Dumping header on disk /dev/loop0'], 'stderr_lines': ['==Header on disk /dev/loop0 NOT dumped', 'sbd failed; please check the logs.'], 'failed_when_result': False, 'item': '/dev/loop0', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "pcs", "--force", "--", "stonith", "sbd", "device", "setup", "device=/dev/loop0", "watchdog-timeout=10", "msgwait-timeout=20" ], "delta": "0:00:00.718289", "end": "2024-03-23 12:05:20.894663", "item": { "ansible_loop_var": "item", "changed": false, "cmd": [ "sbd", "-d", "/dev/loop0", "dump" ], "delta": "0:00:00.022814", "end": "2024-03-23 12:05:19.928046", "failed": false, "failed_when_result": false, "invocation": { "module_args": { "_raw_params": "sbd -d /dev/loop0 dump", "_uses_shell": false, "argv": null, "chdir": null, "creates": null, "executable": null, "expand_argument_vars": true, "removes": null, "stdin": null, "stdin_add_newline": true, "strip_empty_ends": true } }, "item": "/dev/loop0", "msg": "non-zero return code", "rc": 1, "start": "2024-03-23 12:05:19.905232", "stderr": "==Header on disk /dev/loop0 NOT dumped\nsbd failed; please check the logs.", "stderr_lines": [ "==Header on disk /dev/loop0 NOT dumped", "sbd failed; please check the logs." ], "stdout": "==Dumping header on disk /dev/loop0", "stdout_lines": [ "==Dumping header on disk /dev/loop0" ] }, "rc": 0, "start": "2024-03-23 12:05:20.176374" } STDERR: Warning: All current content on device(s) '/dev/loop0' will be overwritten Initializing device '/dev/loop0'... 
Device initialized successfully TASK [fedora.linux_system_roles.ha_cluster : Distribute SBD config] ************ task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:88 Saturday 23 March 2024 12:05:20 +0000 (0:00:00.964) 0:01:42.947 ******** changed: [sut] => { "changed": true, "checksum": "39ad181708d0016205049c0ca3e7a4f1cc64fc3b", "dest": "/etc/sysconfig/sbd", "gid": 0, "group": "root", "md5sum": "ef7001fd02b0240da84bdab1135a22d1", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 4276, "src": "/root/.ansible/tmp/ansible-tmp-1711195520.971939-35496-22037696407710/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Ensure /etc/systemd/system/sbd.service.d directory exists] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:109 Saturday 23 March 2024 12:05:21 +0000 (0:00:00.653) 0:01:43.600 ******** changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/systemd/system/sbd.service.d", "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0", "size": 4096, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Override start timeout for SBD] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:117 Saturday 23 March 2024 12:05:21 +0000 (0:00:00.271) 0:01:43.872 ******** changed: [sut] => { "changed": true, "checksum": "24350da703b697d9a17bf071f2946be63531ed51", "dest": "/etc/systemd/system/sbd.service.d/override-timeout.conf", "gid": 0, "group": "root", "md5sum": "f70e0cca889e5fd2ad7e555f739d0681", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:systemd_unit_file_t:s0", "size": 77, "src": "/root/.ansible/tmp/ansible-tmp-1711195521.9029276-35507-164065542203832/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Reload systemd service files] ***** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:131 Saturday 23 March 2024 12:05:22 +0000 (0:00:00.639) 0:01:44.512 ******** ok: [sut] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.ha_cluster : Get services status - detect pacemaker] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:135 Saturday 23 March 2024 12:05:23 +0000 (0:00:00.848) 0:01:45.360 ******** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "canberra-system-bootup.service": { "name": "canberra-system-bootup.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown-reboot.service": { "name": "canberra-system-shutdown-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown.service": { "name": "canberra-system-shutdown.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "corosync-notifyd.service": { "name": "corosync-notifyd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qnetd.service": { "name": "corosync-qnetd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync.service": { "name": "corosync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crm_mon.service": { "name": "crm_mon.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": 
"inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "geoclue.service": { "name": "geoclue.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "low-memory-monitor.service": { "name": "low-memory-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": 
"mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pacemaker.service": { "name": "pacemaker.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "pcsd-ruby.service": { "name": "pcsd-ruby.service", "source": "systemd", "state": "running", "status": "disabled" }, "pcsd.service": { "name": "pcsd.service", "source": "systemd", "state": "running", "status": "enabled" 
}, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rtkit-daemon.service": { "name": "rtkit-daemon.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "sbd.service": { "name": "sbd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "sbd_remote.service": { "name": "sbd_remote.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": 
"enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": 
"systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": 
"systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "upower.service": { "name": "upower.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": 
"active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Set stonith-watchdog-timeout cluster property in CIB] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:142 Saturday 23 March 2024 12:05:26 +0000 (0:00:02.689) 0:01:48.050 ******** changed: [sut] => { "changed": true, "cmd": [ "pcs", "--force", "-f", "/var/lib/pacemaker/cib/cib.xml", "--", "property", "set", "stonith-watchdog-timeout=" ], "delta": "0:00:00.776474", "end": "2024-03-23 12:05:27.018598", "rc": 0, "start": "2024-03-23 12:05:26.242124" } STDERR: Warning: Cannot remove property 'stonith-watchdog-timeout', it is not present in property set 'cib-bootstrap-options' TASK [fedora.linux_system_roles.ha_cluster : Correct cib.xml ownership] ******** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:155 Saturday 23 March 2024 12:05:27 +0000 (0:00:01.020) 0:01:49.070 ******** changed: [sut] => { "changed": true, "gid": 189, "group": "haclient", "mode": "0600", "owner": "hacluster", "path": "/var/lib/pacemaker/cib/cib.xml", "secontext": "unconfined_u:object_r:cluster_var_lib_t:s0", "size": 275, "state": "file", "uid": 189 } TASK [fedora.linux_system_roles.ha_cluster : Clean cib.xml.sig] **************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:167 Saturday 23 March 2024 12:05:27 +0000 (0:00:00.256) 0:01:49.327 ******** ok: [sut] => { "changed": false, "path": "/var/lib/pacemaker/cib/cib.xml.sig", "state": "absent" } TASK [fedora.linux_system_roles.ha_cluster : Configure corosync] *************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:58 Saturday 23 March 2024 12:05:27 +0000 (0:00:00.244) 0:01:49.571 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Create a corosync.conf tempfile] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:3 Saturday 23 March 2024 12:05:27 +0000 (0:00:00.038) 0:01:49.610 ******** changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/ansible.c0ntqx8p_ha_cluster_corosync_conf", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Create a corosync.conf file content] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:16 Saturday 23 March 2024 12:05:27 +0000 (0:00:00.231) 0:01:49.842 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Create a corosync.conf file content using pcs-0.10] *** task path: 
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml:3 Saturday 23 March 2024 12:05:27 +0000 (0:00:00.029) 0:01:49.871 ******** changed: [sut] => { "changed": true, "cmd": [ "pcs", "cluster", "setup", "--corosync_conf", "/tmp/ansible.c0ntqx8p_ha_cluster_corosync_conf", "--overwrite", "--no-cluster-uuid", "--", "test-cluster", "localhost" ], "delta": "0:00:00.657931", "end": "2024-03-23 12:05:28.732101", "rc": 0, "start": "2024-03-23 12:05:28.074170" } STDERR: Warning: Unable to read the known-hosts file: No such file or directory: '/var/lib/pcsd/known-hosts' No addresses specified for host 'localhost', using 'localhost' TASK [fedora.linux_system_roles.ha_cluster : Add qdevice configuration to corosync.conf] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml:81 Saturday 23 March 2024 12:05:28 +0000 (0:00:00.913) 0:01:50.785 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_qdevice_in_use", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Fetch created corosync.conf file] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:19 Saturday 23 March 2024 12:05:28 +0000 (0:00:00.019) 0:01:50.805 ******** ok: [sut] => { "changed": false, "content": "dG90ZW0gewogICAgdmVyc2lvbjogMgogICAgY2x1c3Rlcl9uYW1lOiB0ZXN0LWNsdXN0ZXIKICAgIHRyYW5zcG9ydDoga25ldAogICAgY3J5cHRvX2NpcGhlcjogYWVzMjU2CiAgICBjcnlwdG9faGFzaDogc2hhMjU2Cn0KCm5vZGVsaXN0IHsKICAgIG5vZGUgewogICAgICAgIHJpbmcwX2FkZHI6IGxvY2FsaG9zdAogICAgICAgIG5hbWU6IGxvY2FsaG9zdAogICAgICAgIG5vZGVpZDogMQogICAgfQp9CgpxdW9ydW0gewogICAgcHJvdmlkZXI6IGNvcm9zeW5jX3ZvdGVxdW9ydW0KfQoKbG9nZ2luZyB7CiAgICB0b19sb2dmaWxlOiB5ZXMKICAgIGxvZ2ZpbGU6IC92YXIvbG9nL2NsdXN0ZXIvY29yb3N5bmMubG9nCiAgICB0b19zeXNsb2c6IHllcwogICAgdGltZXN0YW1wOiBvbgp9Cg==", "encoding": "base64", "source": "/tmp/ansible.c0ntqx8p_ha_cluster_corosync_conf" } TASK [fedora.linux_system_roles.ha_cluster : Distribute corosync.conf file] **** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:26 Saturday 23 March 2024 12:05:29 +0000 (0:00:00.327) 0:01:51.132 ******** changed: [sut] => { "changed": true, "checksum": "05d2ec2a2bfa233bb2b4ace4aae02b42cafc012b", "dest": "/etc/corosync/corosync.conf", "gid": 0, "group": "root", "md5sum": "beb73759420421ade7d3b0d2f8dd24ef", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 388, "src": "/root/.ansible/tmp/ansible-tmp-1711195529.1593359-35535-26573999752088/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Remove a corosync.conf tempfile] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:36 Saturday 23 March 2024 12:05:29 +0000 (0:00:00.612) 0:01:51.745 ******** changed: [sut] => { "changed": true, "path": "/tmp/ansible.c0ntqx8p_ha_cluster_corosync_conf", "state": "absent" } TASK [fedora.linux_system_roles.ha_cluster : Cluster auth] ********************* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:61 Saturday 
23 March 2024 12:05:29 +0000 (0:00:00.254) 0:01:51.999 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Check pcs auth status] ************ task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml:3 Saturday 23 March 2024 12:05:30 +0000 (0:00:00.039) 0:01:52.039 ******** changed: [sut] => { "changed": true, "cmd": [ "pcs", "status", "pcsd", "--", "localhost" ], "delta": "0:00:00.760345", "end": "2024-03-23 12:05:30.988723", "failed_when_result": false, "rc": 2, "start": "2024-03-23 12:05:30.228378" } STDOUT: localhost: Unable to authenticate STDERR: Warning: Unable to read the known-hosts file: No such file or directory: '/var/lib/pcsd/known-hosts' MSG: non-zero return code TASK [fedora.linux_system_roles.ha_cluster : Run pcs auth] ********************* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml:23 Saturday 23 March 2024 12:05:31 +0000 (0:00:01.002) 0:01:53.041 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Pcs auth using pcs-0.10] ********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml:3 Saturday 23 March 2024 12:05:31 +0000 (0:00:00.029) 0:01:53.071 ******** changed: [sut] => { "changed": true, "cmd": [ "pcs", "host", "auth", "-u", "hacluster", "--", "localhost" ], "delta": "0:00:01.174432", "end": "2024-03-23 12:05:32.450086", "rc": 0, "start": "2024-03-23 12:05:31.275654" } STDOUT: Password: STDERR: localhost: Authorized TASK [fedora.linux_system_roles.ha_cluster : Pcs auth for qdevice using pcs-0.10] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml:19 Saturday 23 March 2024 12:05:32 +0000 (0:00:01.432) 0:01:54.503 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_qdevice_model == \"net\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Distribute cluster shared keys] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:66 Saturday 23 March 2024 12:05:32 +0000 (0:00:00.022) 0:01:54.525 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Get corosync authkey] ************* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:3 Saturday 23 March 2024 12:05:32 +0000 (0:00:00.043) 0:01:54.568 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on the controller corosync authkey] *** task path: 
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:16 Saturday 23 March 2024 12:05:32 +0000 (0:00:00.035) 0:01:54.604 ******** skipping: [sut] => { "changed": false, "false_condition": "preshared_key_src is string and preshared_key_src | length > 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from the controller corosync authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:21 Saturday 23 March 2024 12:05:32 +0000 (0:00:00.027) 0:01:54.631 ******** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from the controller corosync authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:29 Saturday 23 March 2024 12:05:32 +0000 (0:00:00.027) 0:01:54.659 ******** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Generate key using OpenSSL corosync authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:44 Saturday 23 March 2024 12:05:32 +0000 (0:00:00.027) 0:01:54.686 ******** ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fetch generated corosync authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:50 Saturday 23 March 2024 12:05:32 +0000 (0:00:00.247) 0:01:54.933 ******** ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on cluster nodes corosync authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:60 Saturday 23 March 2024 12:05:32 +0000 (0:00:00.029) 0:01:54.962 ******** ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from cluster nodes corosync authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:65 Saturday 23 March 2024 12:05:33 +0000 (0:00:00.239) 0:01:55.202 ******** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from cluster nodes corosync authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:73 Saturday 23 March 2024 12:05:33 +0000 (0:00:00.028) 0:01:55.230 ******** skipping: [sut] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } 
skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Distribute corosync authkey] ****** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:11 Saturday 23 March 2024 12:05:33 +0000 (0:00:00.037) 0:01:55.268 ******** changed: [sut] => { "changed": true, "checksum": "46402a5668656e45df3b948d51e7a6945e4872e3", "dest": "/etc/corosync/authkey", "gid": 0, "group": "root", "md5sum": "1ff2ba018196a7a7911efa08f15b1dfb", "mode": "0400", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 256, "src": "/root/.ansible/tmp/ansible-tmp-1711195533.2917125-35565-249695295435280/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Get pacemaker authkey] ************ task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:20 Saturday 23 March 2024 12:05:33 +0000 (0:00:00.607) 0:01:55.875 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on the controller pacemaker authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:16 Saturday 23 March 2024 12:05:33 +0000 (0:00:00.036) 0:01:55.911 ******** skipping: [sut] => { "changed": false, "false_condition": "preshared_key_src is string and preshared_key_src | length > 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from the controller pacemaker authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:21 Saturday 23 March 2024 12:05:33 +0000 (0:00:00.030) 0:01:55.941 ******** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from the controller pacemaker authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:29 Saturday 23 March 2024 12:05:33 +0000 (0:00:00.026) 0:01:55.968 ******** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Generate key using OpenSSL pacemaker authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:44 Saturday 23 March 2024 12:05:33 +0000 (0:00:00.026) 0:01:55.994 ******** ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fetch generated pacemaker authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:50 Saturday 23 March 2024 12:05:34 +0000 (0:00:00.248) 0:01:56.242 ******** ok: [sut] => { "censored": "the output 
has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on cluster nodes pacemaker authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:60 Saturday 23 March 2024 12:05:34 +0000 (0:00:00.029) 0:01:56.272 ******** ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from cluster nodes pacemaker authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:65 Saturday 23 March 2024 12:05:34 +0000 (0:00:00.240) 0:01:56.512 ******** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from cluster nodes pacemaker authkey] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:73 Saturday 23 March 2024 12:05:34 +0000 (0:00:00.031) 0:01:56.543 ******** skipping: [sut] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Distribute pacemaker authkey] ***** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:28 Saturday 23 March 2024 12:05:34 +0000 (0:00:00.087) 0:01:56.631 ******** changed: [sut] => { "changed": true, "checksum": "4fad5fa866677a3716dafc6c227fe3d09d2ad0b8", "dest": "/etc/pacemaker/authkey", "gid": 189, "group": "haclient", "md5sum": "adab832afdf2f2253d19625bfc85a405", "mode": "0400", "owner": "hacluster", "secontext": "system_u:object_r:etc_t:s0", "size": 256, "src": "/root/.ansible/tmp/ansible-tmp-1711195534.654722-35585-122152291914910/source", "state": "file", "uid": 189 } TASK [fedora.linux_system_roles.ha_cluster : Remove qdevice certificates [CLI]] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:37 Saturday 23 March 2024 12:05:35 +0000 (0:00:00.608) 0:01:57.240 ******** skipping: [sut] => { "changed": false, "false_condition": "'corosync.quorum.device.client.model.net.certificates.rest-api.v2' not in __ha_cluster_pcsd_capabilities", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Remove qdevice certificates [API]] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:46 Saturday 23 March 2024 12:05:35 +0000 (0:00:00.022) 0:01:57.262 ******** changed: [sut] => { "changed": true, "pcs_result": { "command": { "command_name": "qdevice.client_net_destroy", "options": { "effective_groups": null, "effective_username": null, "request_timeout": null }, "params": {} }, "kill_reason": null, "reports": [], "result": null, "task_finish_type": "TaskFinishType.SUCCESS", "task_ident": "32576747-adb9-4636-ad0d-238b248577e1" } } TASK 
[fedora.linux_system_roles.ha_cluster : Obtain and distribute qdevice certificates [CLI]] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:54 Saturday 23 March 2024 12:05:36 +0000 (0:00:01.037) 0:01:58.299 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_qdevice_in_use", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Obtain and distribute qdevice certificates [API]] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:71 Saturday 23 March 2024 12:05:36 +0000 (0:00:00.022) 0:01:58.321 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_qdevice_in_use", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Enable or disable cluster services on boot] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:72 Saturday 23 March 2024 12:05:36 +0000 (0:00:00.021) 0:01:58.342 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Enable or disable configured cluster services on boot] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:3 Saturday 23 March 2024 12:05:36 +0000 (0:00:00.041) 0:01:58.384 ******** changed: [sut] => (item=corosync) => { "ansible_loop_var": "item", "changed": true, "enabled": true, "item": "corosync", "name": "corosync", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target sysinit.target system.slice network-online.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", 
"ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Corosync Cluster Engine", "DevicePolicy": "auto", "Documentation": "man:corosync man:corosync.conf man:corosync_overview", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/corosync (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/corosync ; argv[]=/usr/sbin/corosync -f $COROSYNC_OPTIONS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/corosync ; argv[]=/usr/sbin/corosync -f $COROSYNC_OPTIONS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/sbin/corosync-cfgtool ; argv[]=/usr/sbin/corosync-cfgtool -H --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/sbin/corosync-cfgtool ; argv[]=/usr/sbin/corosync-cfgtool -H --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/corosync.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "corosync.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14772", "LimitNPROCSoft": "14772", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14772", "LimitSIGPENDINGSoft": "14772", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", 
"MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "corosync.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice network-online.target", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4431", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } skipping: [sut] => (item=corosync-qdevice) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item != 'corosync-qdevice' or __ha_cluster_qdevice_in_use", "item": "corosync-qdevice", "skip_reason": "Conditional result was False" } changed: [sut] => (item=pacemaker) => { "ansible_loop_var": "item", "changed": true, "enabled": true, "item": "pacemaker", "name": "pacemaker", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "rsyslog.service time-sync.target sysinit.target system.slice corosync.service systemd-journald.socket basic.target network.target resource-agents-deps.target syslog.service 
dbus-broker.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Pacemaker High Availability Cluster Manager", "DevicePolicy": "auto", "Documentation": "man:pacemakerd https://clusterlabs.org/pacemaker/doc/", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/sbd (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pacemakerd ; argv[]=/usr/sbin/pacemakerd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/pacemakerd ; argv[]=/usr/sbin/pacemakerd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pacemaker.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pacemaker.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "process", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": 
"infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14772", "LimitNPROCSoft": "14772", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14772", "LimitSIGPENDINGSoft": "14772", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pacemaker.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice corosync.service sysinit.target", "Restart": "on-failure", "RestartKillSignal": "15", "RestartUSec": "1s", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "25s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SuccessExitStatus": "100", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "infinity", "TimeoutAbortUSec": "30min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min", "TimeoutStopFailureMode": "abort", 
"TimeoutStopUSec": "30min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "simple", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "resource-agents-deps.target dbus-broker.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.ha_cluster : Get services status - detect SBD] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:16 Saturday 23 March 2024 12:05:38 +0000 (0:00:01.736) 0:02:00.120 ******** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "canberra-system-bootup.service": { "name": "canberra-system-bootup.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown-reboot.service": { "name": "canberra-system-shutdown-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown.service": { "name": "canberra-system-shutdown.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "corosync-notifyd.service": { "name": "corosync-notifyd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qnetd.service": { "name": "corosync-qnetd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync.service": { "name": "corosync.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "crm_mon.service": { "name": "crm_mon.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "geoclue.service": { "name": "geoclue.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "low-memory-monitor.service": { "name": "low-memory-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pacemaker.service": { "name": "pacemaker.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "pcsd-ruby.service": { "name": "pcsd-ruby.service", "source": "systemd", "state": "running", "status": "disabled" }, "pcsd.service": { "name": "pcsd.service", "source": "systemd", "state": "running", "status": "enabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { 
"name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rtkit-daemon.service": { "name": "rtkit-daemon.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "sbd.service": { "name": "sbd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "sbd_remote.service": { "name": "sbd_remote.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { 
"name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": 
{ "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "upower.service": { "name": "upower.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Enable or disable SBD] ************ task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:19 Saturday 23 March 2024 12:05:40 +0000 (0:00:02.596) 0:02:02.716 ******** changed: [sut] => { "changed": true, "enabled": true, "name": "sbd", "status": { "AccessSELinuxContext": "system_u:object_r:sbd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target basic.target systemd-journald.socket iscsi.service system.slice systemd-modules-load.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "dlm.service shutdown.target pacemaker.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "no", "CanStop": "no", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource 
cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Shared-storage based fencing daemon", "DevicePolicy": "auto", "Documentation": "\"man:sbd(8)\"", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /etc/systemd/system/sbd.service.d/override-timeout.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/sbd (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/sbd ; argv[]=/usr/sbin/sbd $SBD_OPTS -p /run/sbd.pid watch ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/sbd ; argv[]=/usr/sbin/sbd $SBD_OPTS -p /run/sbd.pid watch ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/kill ; argv[]=/usr/bin/kill -TERM $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/kill ; argv[]=/usr/bin/kill -TERM $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/sbd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "sbd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14772", "LimitNPROCSoft": "14772", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14772", "LimitSIGPENDINGSoft": "14772", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", 
"LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "sbd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/sbd.pid", "PartOf": "corosync.service", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "yes", "RefuseManualStop": "yes", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice", "Restart": "on-abort", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4431", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.ha_cluster : Start the cluster and reload corosync.conf] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:75 Saturday 23 March 2024 12:05:41 +0000 (0:00:00.870) 0:02:03.587 ******** included: 
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Get services status - detect corosync-qdevice] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:21 Saturday 23 March 2024 12:05:41 +0000 (0:00:00.046) 0:02:03.634 ******** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "canberra-system-bootup.service": { "name": "canberra-system-bootup.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown-reboot.service": { "name": "canberra-system-shutdown-reboot.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "canberra-system-shutdown.service": { "name": "canberra-system-shutdown.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "corosync-notifyd.service": { "name": "corosync-notifyd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qnetd.service": 
{ "name": "corosync-qnetd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync.service": { "name": "corosync.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "crm_mon.service": { "name": "crm_mon.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.home1.service": { "name": "dbus-org.freedesktop.home1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dlm.service": { "name": "dlm.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "geoclue.service": { "name": "geoclue.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "low-memory-monitor.service": { "name": "low-memory-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", 
"status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pacemaker.service": { "name": "pacemaker.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "pcsd-ruby.service": { "name": "pcsd-ruby.service", "source": "systemd", "state": "running", "status": "disabled" }, "pcsd.service": { "name": "pcsd.service", "source": "systemd", "state": "running", "status": "enabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": 
"rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rtkit-daemon.service": { "name": "rtkit-daemon.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "sbd.service": { "name": "sbd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "sbd_remote.service": { "name": "sbd_remote.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { 
"name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd-wait-online@.service": { "name": "systemd-networkd-wait-online@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { 
"name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "active" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "upower.service": { "name": "upower.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" } } }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Stop cluster daemons to reload configuration] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:24 Saturday 23 March 2024 12:05:44 +0000 (0:00:02.600) 0:02:06.235 ******** ok: [sut] => (item=pacemaker) => { "ansible_loop_var": "item", "changed": false, "item": "pacemaker", "name": "pacemaker", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "syslog.service network.target basic.target sysinit.target rsyslog.service sbd.service systemd-journald.socket time-sync.target dbus-broker.service system.slice resource-agents-deps.target corosync.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot 
cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Pacemaker High Availability Cluster Manager", "DevicePolicy": "auto", "Documentation": "man:pacemakerd https://clusterlabs.org/pacemaker/doc/", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/sbd (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pacemakerd ; argv[]=/usr/sbin/pacemakerd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/pacemakerd ; argv[]=/usr/sbin/pacemakerd ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pacemaker.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pacemaker.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "process", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14772", "LimitNPROCSoft": "14772", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14772", "LimitSIGPENDINGSoft": "14772", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", 
"ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pacemaker.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target corosync.service sbd.service", "Restart": "on-failure", "RestartKillSignal": "15", "RestartUSec": "1s", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "25s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2024-03-23 12:05:41 UTC", "StateChangeTimestampMonotonic": "1132443575", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SuccessExitStatus": "100", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "infinity", "TimeoutAbortUSec": "30min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "30min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "simple", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "resource-agents-deps.target dbus-broker.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } skipping: [sut] => (item=corosync-qdevice) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item != 'corosync-qdevice' or 'corosync-qdevice.service' in ansible_facts.services\n", "item": "corosync-qdevice", "skip_reason": "Conditional result was False" } ok: [sut] => (item=corosync) => { "ansible_loop_var": "item", "changed": false, "item": "corosync", "name": "corosync", "state": "stopped", "status": { 
"AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target basic.target system.slice systemd-journald.socket sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target pacemaker.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "sbd.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Corosync Cluster Engine", "DevicePolicy": "auto", "Documentation": "man:corosync man:corosync.conf man:corosync_overview", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/corosync (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/corosync ; argv[]=/usr/sbin/corosync -f $COROSYNC_OPTIONS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/corosync ; argv[]=/usr/sbin/corosync -f $COROSYNC_OPTIONS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/sbin/corosync-cfgtool ; argv[]=/usr/sbin/corosync-cfgtool -H --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/sbin/corosync-cfgtool ; argv[]=/usr/sbin/corosync-cfgtool -H --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/corosync.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", 
"IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "corosync.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14772", "LimitNPROCSoft": "14772", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14772", "LimitSIGPENDINGSoft": "14772", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "corosync.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "RequiredBy": "pacemaker.service", "Requires": "network-online.target system.slice sysinit.target sbd.service", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not 
set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2024-03-23 12:05:38 UTC", "StateChangeTimestampMonotonic": "1128985112", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4431", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.ha_cluster : Start corosync] ******************* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:47 Saturday 23 March 2024 12:05:45 +0000 (0:00:00.815) 0:02:07.050 ******** changed: [sut] => { "changed": true, "name": "corosync", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "network-online.target basic.target system.slice systemd-journald.socket sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target pacemaker.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "sbd.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Corosync Cluster Engine", "DevicePolicy": "auto", "Documentation": "man:corosync man:corosync.conf man:corosync_overview", "DropInPaths": 
"/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/corosync (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/corosync ; argv[]=/usr/sbin/corosync -f $COROSYNC_OPTIONS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/corosync ; argv[]=/usr/sbin/corosync -f $COROSYNC_OPTIONS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/sbin/corosync-cfgtool ; argv[]=/usr/sbin/corosync-cfgtool -H --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/sbin/corosync-cfgtool ; argv[]=/usr/sbin/corosync-cfgtool -H --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/corosync.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "corosync.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14772", "LimitNPROCSoft": "14772", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14772", "LimitSIGPENDINGSoft": "14772", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": 
"corosync.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "RequiredBy": "pacemaker.service", "Requires": "network-online.target system.slice sysinit.target sbd.service", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2024-03-23 12:05:38 UTC", "StateChangeTimestampMonotonic": "1128985112", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "4431", "TimeoutAbortUSec": "45s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "45s", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "45s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.ha_cluster : Reload corosync configuration] **** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:56 Saturday 23 March 2024 12:05:46 +0000 (0:00:01.038) 0:02:08.089 ******** ok: [sut] => { "changed": false, "cmd": [ "corosync-cfgtool", "-R" ], "delta": "0:00:00.012963", "end": "2024-03-23 12:05:46.290456", "rc": 0, "start": "2024-03-23 12:05:46.277493" } STDOUT: Reloading corosync.conf... 
Done TASK [fedora.linux_system_roles.ha_cluster : Start corosync-qdevice] *********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:63 Saturday 23 March 2024 12:05:46 +0000 (0:00:00.253) 0:02:08.342 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_qdevice_in_use", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Start pacemaker] ****************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:69 Saturday 23 March 2024 12:05:46 +0000 (0:00:00.021) 0:02:08.364 ******** changed: [sut] => { "changed": true, "name": "pacemaker", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:cluster_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "syslog.service network.target basic.target sysinit.target rsyslog.service sbd.service systemd-journald.socket time-sync.target dbus-broker.service system.slice resource-agents-deps.target corosync.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Pacemaker High Availability Cluster Manager", "DevicePolicy": "auto", "Documentation": "man:pacemakerd https://clusterlabs.org/pacemaker/doc/", "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/sbd (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pacemakerd ; argv[]=/usr/sbin/pacemakerd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/pacemakerd ; argv[]=/usr/sbin/pacemakerd ; flags= ; start_time=[n/a] ; 
stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/pacemaker.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "pacemaker.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "process", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "14772", "LimitNPROCSoft": "14772", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14772", "LimitSIGPENDINGSoft": "14772", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MemoryZSwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "pacemaker.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target corosync.service sbd.service", "Restart": "on-failure", "RestartKillSignal": "15", "RestartUSec": "1s", "RestrictNamespaces": "no", "RestrictRealtime": "no", 
"RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "25s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2024-03-23 12:05:41 UTC", "StateChangeTimestampMonotonic": "1132443575", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SuccessExitStatus": "100", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "infinity", "TimeoutAbortUSec": "30min", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min", "TimeoutStopFailureMode": "abort", "TimeoutStopUSec": "30min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "simple", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "resource-agents-deps.target dbus-broker.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.ha_cluster : Wait for the cluster to fully start and form membership] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:75 Saturday 23 March 2024 12:05:50 +0000 (0:00:04.237) 0:02:12.602 ******** ok: [sut] => { "changed": false, "cmd": [ "pcs", "cluster", "start", "--all", "--wait" ], "delta": "0:00:26.450290", "end": "2024-03-23 12:06:17.243194", "rc": 0, "start": "2024-03-23 12:05:50.792904" } STDERR: localhost: Starting Cluster... Waiting for node(s) to start... 
localhost: Started TASK [fedora.linux_system_roles.ha_cluster : List pacemaker nodes] ************* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:89 Saturday 23 March 2024 12:06:17 +0000 (0:00:26.692) 0:02:39.295 ******** ok: [sut] => { "changed": false, "cmd": "set -euo pipefail; crm_mon -X | xmllint --xpath '/crm_mon/nodes/node/@name' - | sed -E 's/\\s*name=\"([^\"]+)\"\\s*/\\1\\n/g'\n", "delta": "0:00:00.024220", "end": "2024-03-23 12:06:17.509778", "rc": 0, "start": "2024-03-23 12:06:17.485558" } STDOUT: localhost TASK [fedora.linux_system_roles.ha_cluster : Purge removed nodes from pacemaker's cache] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:111 Saturday 23 March 2024 12:06:17 +0000 (0:00:00.267) 0:02:39.563 ******** skipping: [sut] => (item=localhost) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item not in __ha_cluster_all_node_names", "item": "localhost", "skip_reason": "Conditional result was False" } skipping: [sut] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.ha_cluster : Create and push CIB] ************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:78 Saturday 23 March 2024 12:06:17 +0000 (0:00:00.024) 0:02:39.587 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Create a tempfile for original CIB] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:10 Saturday 23 March 2024 12:06:17 +0000 (0:00:00.060) 0:02:39.648 ******** changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/ansible.zsye82ax_ha_cluster_original_cib_xml", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Create a tempfile for new CIB] **** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:18 Saturday 23 March 2024 12:06:17 +0000 (0:00:00.239) 0:02:39.887 ******** changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/ansible.yxr59x_m_ha_cluster_cib_xml", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Fetch CIB configuration] ********** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:26 Saturday 23 March 2024 12:06:18 +0000 (0:00:00.240) 0:02:40.128 ******** ok: [sut] => { "changed": false, "cmd": [ "cibadmin", "--query" ], "delta": "0:00:00.013735", "end": "2024-03-23 12:06:18.325573", "rc": 0, "start": "2024-03-23 12:06:18.311838" } STDOUT: TASK [fedora.linux_system_roles.ha_cluster : Write CIB configuration] ********** task path: 
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:33 Saturday 23 March 2024 12:06:18 +0000 (0:00:00.248) 0:02:40.377 ******** changed: [sut] => (item=/tmp/ansible.yxr59x_m_ha_cluster_cib_xml) => { "ansible_loop_var": "item", "changed": true, "checksum": "44a9e5e5bace182fa1a7f64583d53a2d02da0eb2", "dest": "/tmp/ansible.yxr59x_m_ha_cluster_cib_xml", "gid": 0, "group": "root", "item": "/tmp/ansible.yxr59x_m_ha_cluster_cib_xml", "md5sum": "fd8bad3510ef17b1d283594c51deba13", "mode": "0600", "owner": "root", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 1397, "src": "/root/.ansible/tmp/ansible-tmp-1711195578.4011266-35632-225776820362584/source", "state": "file", "uid": 0 } changed: [sut] => (item=/tmp/ansible.zsye82ax_ha_cluster_original_cib_xml) => { "ansible_loop_var": "item", "changed": true, "checksum": "44a9e5e5bace182fa1a7f64583d53a2d02da0eb2", "dest": "/tmp/ansible.zsye82ax_ha_cluster_original_cib_xml", "gid": 0, "group": "root", "item": "/tmp/ansible.zsye82ax_ha_cluster_original_cib_xml", "md5sum": "fd8bad3510ef17b1d283594c51deba13", "mode": "0600", "owner": "root", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 1397, "src": "/root/.ansible/tmp/ansible-tmp-1711195579.0060635-35632-59097453420261/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Purge new CIB configuration] ****** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:51 Saturday 23 March 2024 12:06:19 +0000 (0:00:01.234) 0:02:41.612 ******** changed: [sut] => { "changed": true, "cmd": [ "cibadmin", "--force", "--delete-all", "--xpath", "/cib/configuration/*[not(\n self::crm_config or\n self::nodes or\n self::resources or\n self::constraints\n)] | /cib/configuration/*[self::resources or self::constraints]/* | /cib/configuration/nodes/*/* | /cib/configuration/crm_config//nvpair[not(\n @name=\"cluster-infrastructure\" or\n @name=\"cluster-name\" or\n @name=\"dc-version\" or\n @name=\"have-watchdog\" or\n @name=\"last-lrm-refresh\" or\n @name=\"stonith-watchdog-timeout\"\n)]" ], "delta": "0:00:00.024236", "end": "2024-03-23 12:06:19.828337", "rc": 0, "start": "2024-03-23 12:06:19.804101" } TASK [fedora.linux_system_roles.ha_cluster : Configure cluster properties] ***** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:85 Saturday 23 March 2024 12:06:19 +0000 (0:00:00.269) 0:02:41.882 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_cluster_properties[0].attrs | d([])", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure node attributes] ******** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:95 Saturday 23 March 2024 12:06:19 +0000 (0:00:00.021) 0:02:41.904 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure resource defaults] ****** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:105 Saturday 23 March 2024 12:06:19 +0000 (0:00:00.024) 0:02:41.928 ******** skipping: [sut] => { 
"changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure resource operation defaults] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:117 Saturday 23 March 2024 12:06:19 +0000 (0:00:00.020) 0:02:41.949 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Extract primitive to bundle mapping] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:127 Saturday 23 March 2024 12:06:19 +0000 (0:00:00.024) 0:02:41.974 ******** ok: [sut] => { "ansible_facts": { "__ha_cluster_primitive_bundle_map": {} }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Configure cluster bundle resources] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:131 Saturday 23 March 2024 12:06:19 +0000 (0:00:00.027) 0:02:42.001 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure cluster resources] ****** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:137 Saturday 23 March 2024 12:06:19 +0000 (0:00:00.011) 0:02:42.013 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure cluster resource groups] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:144 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.011) 0:02:42.025 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure cluster resource clones] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:150 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.011) 0:02:42.036 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure stonith levels] ********* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:157 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.010) 0:02:42.047 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure resource location constraints] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:165 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.012) 0:02:42.060 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure resource colocation constraints] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:172 Saturday 23 March 
2024 12:06:20 +0000 (0:00:00.011) 0:02:42.072 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure resource set colocation constraints] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:180 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.011) 0:02:42.083 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure resource order constraints] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:190 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.011) 0:02:42.094 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure resource set order constraints] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:198 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.011) 0:02:42.106 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure resource ticket constraints] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:208 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.011) 0:02:42.117 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure resource set ticket constraints] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:216 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.012) 0:02:42.130 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure acls] ******************* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:226 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.012) 0:02:42.143 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-acls.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Configure ACL roles] ************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-acls.yml:3 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.034) 0:02:42.177 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure ACL users] ************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-acls.yml:28 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.022) 0:02:42.200 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure ACL groups] ************* task path: 
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cib-acls.yml:44 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.022) 0:02:42.222 ******** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Create a tempfile for CIB diff] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:233 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.024) 0:02:42.246 ******** changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/ansible.21tdthal_ha_cluster_cib_diff", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Compare new and original CIB] ***** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:241 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.240) 0:02:42.486 ******** changed: [sut] => { "changed": true, "cmd": [ "crm_diff", "--no-version", "--original", "/tmp/ansible.zsye82ax_ha_cluster_original_cib_xml", "--new", "/tmp/ansible.yxr59x_m_ha_cluster_cib_xml" ], "delta": "0:00:00.008412", "end": "2024-03-23 12:06:20.686321", "failed_when_result": false, "rc": 0, "start": "2024-03-23 12:06:20.677909" } TASK [fedora.linux_system_roles.ha_cluster : Write CIB diff to its tempfile] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:254 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.253) 0:02:42.740 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_cib_diff.rc == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Push CIB diff to the cluster if it has any changes] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:265 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.022) 0:02:42.763 ******** skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_cib_diff.rc == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Remove CIB tempfiles] ************* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:274 Saturday 23 March 2024 12:06:20 +0000 (0:00:00.022) 0:02:42.785 ******** changed: [sut] => (item={'changed': True, 'path': '/tmp/ansible.yxr59x_m_ha_cluster_cib_xml', 'uid': 0, 'gid': 0, 'owner': 'root', 'group': 'root', 'mode': '0600', 'state': 'file', 'secontext': 'unconfined_u:object_r:user_tmp_t:s0', 'size': 0, 'failed': False}) => { "ansible_loop_var": "item", "changed": true, "item": { "changed": true, "failed": false, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/ansible.yxr59x_m_ha_cluster_cib_xml", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 }, "path": "/tmp/ansible.yxr59x_m_ha_cluster_cib_xml", "state": "absent" } changed: [sut] => (item={'changed': True, 'path': '/tmp/ansible.zsye82ax_ha_cluster_original_cib_xml', 'uid': 0, 'gid': 0, 'owner': 'root', 'group': 'root', 'mode': '0600', 
'state': 'file', 'secontext': 'unconfined_u:object_r:user_tmp_t:s0', 'size': 0, 'failed': False}) => { "ansible_loop_var": "item", "changed": true, "item": { "changed": true, "failed": false, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/ansible.zsye82ax_ha_cluster_original_cib_xml", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 }, "path": "/tmp/ansible.zsye82ax_ha_cluster_original_cib_xml", "state": "absent" } changed: [sut] => (item={'changed': True, 'path': '/tmp/ansible.21tdthal_ha_cluster_cib_diff', 'uid': 0, 'gid': 0, 'owner': 'root', 'group': 'root', 'mode': '0600', 'state': 'file', 'secontext': 'unconfined_u:object_r:user_tmp_t:s0', 'size': 0, 'failed': False}) => { "ansible_loop_var": "item", "changed": true, "item": { "changed": true, "failed": false, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/ansible.21tdthal_ha_cluster_cib_diff", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 }, "path": "/tmp/ansible.21tdthal_ha_cluster_cib_diff", "state": "absent" } TASK [fedora.linux_system_roles.ha_cluster : Remove cluster configuration] ***** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:87 Saturday 23 March 2024 12:06:21 +0000 (0:00:00.724) 0:02:43.510 ******** skipping: [sut] => { "changed": false, "false_condition": "not ha_cluster_cluster_present", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Remove fence-virt authkey] ******** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:90 Saturday 23 March 2024 12:06:21 +0000 (0:00:00.012) 0:02:43.522 ******** skipping: [sut] => { "changed": false, "false_condition": "not ha_cluster_cluster_present", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure qnetd] ****************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:95 Saturday 23 March 2024 12:06:21 +0000 (0:00:00.014) 0:02:43.536 ******** included: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Remove qnetd configuration] ******* task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:3 Saturday 23 March 2024 12:06:21 +0000 (0:00:00.049) 0:02:43.586 ******** changed: [sut] => { "changed": true, "cmd": [ "pcs", "--force", "--", "qdevice", "destroy", "net" ], "delta": "0:00:01.922685", "end": "2024-03-23 12:06:23.702249", "rc": 0, "start": "2024-03-23 12:06:21.779564" } STDERR: Stopping quorum device... 
quorum device stopped quorum device disabled Quorum device 'net' configuration files removed TASK [fedora.linux_system_roles.ha_cluster : Setup qnetd] ********************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:16 Saturday 23 March 2024 12:06:23 +0000 (0:00:02.169) 0:02:45.756 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_qnetd.present | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Enable or disable qnetd service on boot] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:26 Saturday 23 March 2024 12:06:23 +0000 (0:00:00.022) 0:02:45.778 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_qnetd.present | d(false)", "skip_reason": "Conditional result was False" } TASK [Slurp generated SBD watchdog blocklist file] ***************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:30 Saturday 23 March 2024 12:06:23 +0000 (0:00:00.025) 0:02:45.804 ******** ok: [sut] => { "changed": false, "content": "YmxhY2tsaXN0IGlUQ09fd2R0Cg==", "encoding": "base64", "source": "/etc/modprobe.d/iTCO_wdt.conf" } TASK [Decode SBD watchdog blocklist file] ************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:35 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.233) 0:02:46.037 ******** ok: [sut] => { "ansible_facts": { "__test_sbd_watchdog_blocklist_file_lines": [ "blacklist iTCO_wdt" ] }, "changed": false } TASK [Print SBD watchdog blocklist file lines] ********************************* task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:39 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.025) 0:02:46.062 ******** ok: [sut] => { "__test_sbd_watchdog_blocklist_file_lines": [ "blacklist iTCO_wdt" ] } TASK [Check SBD watchdog blocklist file] *************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:43 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.014) 0:02:46.076 ******** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Slurp generated SBD watchdog modprobe file] ****************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:51 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.014) 0:02:46.090 ******** ok: [sut] => { "changed": false, "content": "c29mdGRvZwo=", "encoding": "base64", "source": "/etc/modules-load.d/softdog.conf" } TASK [Decode SBD watchdog modprobe file] *************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:56 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.223) 0:02:46.314 ******** ok: [sut] => { "ansible_facts": { "__test_sbd_watchdog_modprobe_file_lines": [ "softdog" ] }, "changed": false } TASK [Print SBD watchdog modprobe file lines] ********************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:60 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.023) 0:02:46.337 ******** ok: [sut] => { "__test_sbd_watchdog_modprobe_file_lines": [ "softdog" ] } TASK [Check SBD watchdog modprobe file] **************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:64 Saturday 23 March 2024 
12:06:24 +0000 (0:00:00.014) 0:02:46.351 ******** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Run lsmod for SBD watchdog module] *************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:71 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.016) 0:02:46.368 ******** ok: [sut] => { "changed": false, "cmd": [ "lsmod" ], "delta": "0:00:00.005950", "end": "2024-03-23 12:06:24.550517", "rc": 0, "start": "2024-03-23 12:06:24.544567" } STDOUT: Module Size Used by sctp 528384 2 ip6_udp_tunnel 16384 1 sctp udp_tunnel 32768 1 sctp tls 151552 0 softdog 16384 0 rfkill 40960 1 sunrpc 888832 1 intel_rapl_msr 20480 0 intel_rapl_common 40960 1 intel_rapl_msr snd_pcsp 24576 0 snd_pcm 184320 1 snd_pcsp snd_timer 53248 1 snd_pcm snd 155648 3 snd_timer,snd_pcsp,snd_pcm i2c_piix4 32768 0 rapl 20480 0 soundcore 16384 1 snd fuse 212992 1 loop 40960 2 zram 32768 2 crct10dif_pclmul 12288 1 crc32_pclmul 12288 0 crc32c_intel 16384 2 polyval_clmulni 12288 0 polyval_generic 12288 1 polyval_clmulni ghash_clmulni_intel 16384 0 xen_blkfront 57344 2 sha512_ssse3 53248 0 xen_netfront 57344 1 sha256_ssse3 32768 0 ata_generic 12288 0 sha1_ssse3 32768 0 cirrus 16384 0 pata_acpi 12288 0 serio_raw 16384 0 TASK [Print lsmod] ************************************************************* task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:76 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.233) 0:02:46.601 ******** ok: [sut] => { "__test_sbd_watchdog_sbd_lsmod": { "changed": false, "cmd": [ "lsmod" ], "delta": "0:00:00.005950", "end": "2024-03-23 12:06:24.550517", "failed": false, "msg": "", "rc": 0, "start": "2024-03-23 12:06:24.544567", "stderr": "", "stderr_lines": [], "stdout": "Module Size Used by\nsctp 528384 2\nip6_udp_tunnel 16384 1 sctp\nudp_tunnel 32768 1 sctp\ntls 151552 0\nsoftdog 16384 0\nrfkill 40960 1\nsunrpc 888832 1\nintel_rapl_msr 20480 0\nintel_rapl_common 40960 1 intel_rapl_msr\nsnd_pcsp 24576 0\nsnd_pcm 184320 1 snd_pcsp\nsnd_timer 53248 1 snd_pcm\nsnd 155648 3 snd_timer,snd_pcsp,snd_pcm\ni2c_piix4 32768 0\nrapl 20480 0\nsoundcore 16384 1 snd\nfuse 212992 1\nloop 40960 2\nzram 32768 2\ncrct10dif_pclmul 12288 1\ncrc32_pclmul 12288 0\ncrc32c_intel 16384 2\npolyval_clmulni 12288 0\npolyval_generic 12288 1 polyval_clmulni\nghash_clmulni_intel 16384 0\nxen_blkfront 57344 2\nsha512_ssse3 53248 0\nxen_netfront 57344 1\nsha256_ssse3 32768 0\nata_generic 12288 0\nsha1_ssse3 32768 0\ncirrus 16384 0\npata_acpi 12288 0\nserio_raw 16384 0", "stdout_lines": [ "Module Size Used by", "sctp 528384 2", "ip6_udp_tunnel 16384 1 sctp", "udp_tunnel 32768 1 sctp", "tls 151552 0", "softdog 16384 0", "rfkill 40960 1", "sunrpc 888832 1", "intel_rapl_msr 20480 0", "intel_rapl_common 40960 1 intel_rapl_msr", "snd_pcsp 24576 0", "snd_pcm 184320 1 snd_pcsp", "snd_timer 53248 1 snd_pcm", "snd 155648 3 snd_timer,snd_pcsp,snd_pcm", "i2c_piix4 32768 0", "rapl 20480 0", "soundcore 16384 1 snd", "fuse 212992 1", "loop 40960 2", "zram 32768 2", "crct10dif_pclmul 12288 1", "crc32_pclmul 12288 0", "crc32c_intel 16384 2", "polyval_clmulni 12288 0", "polyval_generic 12288 1 polyval_clmulni", "ghash_clmulni_intel 16384 0", "xen_blkfront 57344 2", "sha512_ssse3 53248 0", "xen_netfront 57344 1", "sha256_ssse3 32768 0", "ata_generic 12288 0", "sha1_ssse3 32768 0", "cirrus 16384 0", "pata_acpi 12288 0", "serio_raw 16384 0" ] } } TASK [Check lsmod output for absence of SBD watchdog module blocklist] ********* task path: 
/WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:80 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.015) 0:02:46.617 ******** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check lsmod output for SBD watchdog module] ****************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:85 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.013) 0:02:46.630 ******** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Slurp SBD config file] *************************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:90 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.014) 0:02:46.644 ******** ok: [sut] => { "changed": false, "content": "IwojIEFuc2libGUgbWFuYWdlZAojCiMgc3lzdGVtX3JvbGU6aGFfY2x1c3RlcgoKCgojIyBUeXBlOiBzdHJpbmcKIyMgRGVmYXVsdDogIiIKIwojIFNCRF9ERVZJQ0Ugc3BlY2lmaWVzIHRoZSBkZXZpY2VzIHRvIHVzZSBmb3IgZXhjaGFuZ2luZyBzYmQgbWVzc2FnZXMKIyBhbmQgdG8gbW9uaXRvci4gSWYgc3BlY2lmeWluZyBtb3JlIHRoYW4gb25lIHBhdGgsIHVzZSAiOyIgYXMKIyBzZXBhcmF0b3IuCiMKU0JEX0RFVklDRT0iL2Rldi9sb29wMCIKCiMjIFR5cGU6IHllc25vCiMjIERlZmF1bHQ6IHllcwojCiMgV2hldGhlciB0byBlbmFibGUgdGhlIHBhY2VtYWtlciBpbnRlZ3JhdGlvbi4KIwpTQkRfUEFDRU1BS0VSPXllcwoKIyMgVHlwZTogYWx3YXlzIC8gY2xlYW4KIyMgRGVmYXVsdDogYWx3YXlzCiMKIyBTcGVjaWZ5IHRoZSBzdGFydCBtb2RlIGZvciBzYmQuIFNldHRpbmcgdGhpcyB0byAiY2xlYW4iIHdpbGwgb25seQojIGFsbG93IHNiZCB0byBzdGFydCBpZiBpdCB3YXMgbm90IHByZXZpb3VzbHkgZmVuY2VkLiBTZWUgdGhlIC1TIG9wdGlvbgojIGluIHRoZSBtYW4gcGFnZS4KIwpTQkRfU1RBUlRNT0RFPSJjbGVhbiIKCiMjIFR5cGU6IHllc25vIC8gaW50ZWdlcgojIyBEZWZhdWx0OiBubwojCiMgV2hldGhlciB0byBkZWxheSBhZnRlciBzdGFydGluZyBzYmQgb24gYm9vdCBmb3IgIm1zZ3dhaXQiIHNlY29uZHMuCiMgVGhpcyBtYXkgYmUgbmVjZXNzYXJ5IGlmIHlvdXIgY2x1c3RlciBub2RlcyByZWJvb3Qgc28gZmFzdCB0aGF0IHRoZQojIG90aGVyIG5vZGVzIGFyZSBzdGlsbCB3YWl0aW5nIGluIHRoZSBmZW5jZSBhY2tub3dsZWRnZW1lbnQgcGhhc2UuCiMgVGhpcyBpcyBhbiBvY2Nhc2lvbmFsIGlzc3VlIHdpdGggdmlydHVhbCBtYWNoaW5lcy4KIwojIFRoaXMgY2FuIGFsc28gYmUgZW5hYmxlZCBieSBiZWluZyBzZXQgdG8gYSBzcGVjaWZpYyBkZWxheSB2YWx1ZSwgaW4KIyBzZWNvbmRzLiBTb21ldGltZXMgYSBsb25nZXIgZGVsYXkgdGhhbiB0aGUgZGVmYXVsdCwgIm1zZ3dhaXQiLCBpcwojIG5lZWRlZCwgZm9yIGV4YW1wbGUgaW4gdGhlIGNhc2VzIHdoZXJlIGl0J3MgY29uc2lkZXJlZCB0byBiZSBzYWZlciB0bwojIHdhaXQgbG9uZ2VyIHRoYW46CiMgY29yb3N5bmMgdG9rZW4gdGltZW91dCArIGNvbnNlbnN1cyB0aW1lb3V0ICsgcGNta19kZWxheV9tYXggKyBtc2d3YWl0CiMKIyBCZSBhd2FyZSB0aGF0IHRoZSBzcGVjaWFsIHZhbHVlICIxIiBtZWFucyAieWVzIiByYXRoZXIgdGhhbiAiMXMiLgojCiMgQ29uc2lkZXIgdGhhdCB5b3UgbWlnaHQgaGF2ZSB0byBhZGFwdCB0aGUgc3RhcnR1cC10aW1lb3V0IGFjY29yZGluZ2x5CiMgaWYgdGhlIGRlZmF1bHQgaXNuJ3Qgc3VmZmljaWVudC4gKFRpbWVvdXRTdGFydFNlYyBmb3Igc3lzdGVtZCkKIwojIFRoaXMgb3B0aW9uIG1heSBiZSBpZ25vcmVkIGF0IGEgbGF0ZXIgcG9pbnQsIG9uY2UgcGFjZW1ha2VyIGhhbmRsZXMKIyB0aGlzIGNhc2UgYmV0dGVyLgojClNCRF9ERUxBWV9TVEFSVD0iMiIKCiMjIFR5cGU6IHN0cmluZwojIyBEZWZhdWx0OiAvZGV2L3dhdGNoZG9nCiMKIyBXYXRjaGRvZyBkZXZpY2UgdG8gdXNlLiBJZiBzZXQgdG8gL2Rldi9udWxsLCBubyB3YXRjaGRvZyBkZXZpY2Ugd2lsbAojIGJlIHVzZWQuCiMKU0JEX1dBVENIRE9HX0RFVj0iL2Rldi9udWxsIgoKIyMgVHlwZTogaW50ZWdlcgojIyBEZWZhdWx0OiA1CiMKIyBIb3cgbG9uZywgaW4gc2Vjb25kcywgdGhlIHdhdGNoZG9nIHdpbGwgd2FpdCBiZWZvcmUgcGFuaWNraW5nIHRoZQojIG5vZGUgaWYgbm8tb25lIHRpY2tsZXMgaXQuCiMKIyBUaGlzIGRlcGVuZHMgbW9zdGx5IG9uIHlvdXIgc3RvcmFnZSBsYXRlbmN5OyB0aGUgbWFqb3JpdHkgb2YgZGV2aWNlcwojIG11c3QgYmUgc3VjY2Vzc2Z1bGx5IHJlYWQgd2l0aGluIHRoaXMgdGltZSwgb3IgZWxzZSB0aGUgbm9kZSB3aWxsCiMgc2VsZi1mZW5jZS4KIwojIElmIHlvdXIgc2JkIGRldmljZShzKSByZXNpZGUgb24gYSBtdWx0aXBhdGggc2V0dXAgb3IgaVNDU0ksIHRoaXMKIyBzaG91bGQgYmUgdGhlIHRpbWUgcmVxdWlyZWQgdG8gZGV0ZWN0IGEgcGF0aCBmYWlsdXJlLg
ojCiMgQmUgYXdhcmUgdGhhdCB3YXRjaGRvZyB0aW1lb3V0IHNldCBpbiB0aGUgb24tZGlzayBtZXRhZGF0YSB0YWtlcwojIHByZWNlZGVuY2UuCiMKU0JEX1dBVENIRE9HX1RJTUVPVVQ9IjEwIgoKIyMgVHlwZTogc3RyaW5nCiMjIERlZmF1bHQ6ICJmbHVzaCxyZWJvb3QiCiMKIyBBY3Rpb25zIHRvIGJlIGV4ZWN1dGVkIHdoZW4gdGhlIHdhdGNoZXJzIGRvbid0IHRpbWVseSByZXBvcnQgdG8gdGhlIHNiZAojIG1haW4gcHJvY2VzcyBvciBvbmUgb2YgdGhlIHdhdGNoZXJzIGRldGVjdHMgdGhhdCB0aGUgbWFpbiBwcm9jZXNzIGhhcwojIGRpZWQuCiMKIyBTZXQgdGltZW91dC1hY3Rpb24gdG8gY29tbWEtc2VwYXJhdGVkIGNvbWJpbmF0aW9uIG9mCiMgbm9mbHVzaHxmbHVzaCBwbHVzIHJlYm9vdHxjcmFzaGR1bXB8b2ZmLgojIElmIGp1c3Qgb25lIG9mIGJvdGggaXMgZ2l2ZW4gdGhlIG90aGVyIHN0YXlzIGF0IHRoZSBkZWZhdWx0LgojCiMgVGhpcyBkb2Vzbid0IGFmZmVjdCBhY3Rpb25zIGxpa2Ugb2ZmLCBjcmFzaGR1bXAsIHJlYm9vdCBleHBsaWNpdGx5CiMgdHJpZ2dlcmVkIHZpYSBtZXNzYWdlIHNsb3RzLgojIEFuZCBpdCBkb2VzIGFzIHdlbGwgbm90IGNvbmZpZ3VyZSB0aGUgYWN0aW9uIGEgd2F0Y2hkb2cgd291bGQKIyB0cmlnZ2VyIHNob3VsZCBpdCBydW4gb2ZmICh0aGVyZSBpcyBubyBnZW5lcmljIGludGVyZmFjZSkuCiMKU0JEX1RJTUVPVVRfQUNUSU9OPSJyZWJvb3QsZmx1c2giCgojIyBUeXBlOiB5ZXNubyAvIGF1dG8KIyMgRGVmYXVsdDogYXV0bwojCiMgSWYgQ1BVQWNjb3VudGluZyBpcyBlbmFibGVkIGRlZmF1bHQgaXMgbm90IHRvIGFzc2lnbiBhbnkgUlQtYnVkZ2V0CiMgdG8gdGhlIHN5c3RlbS5zbGljZSB3aGljaCBwcmV2ZW50cyBzYmQgZnJvbSBydW5uaW5nIFJSLXNjaGVkdWxlZC4KIwojIE9uZSB3YXkgdG8gZXNjYXBlIHRoYXQgaXNzdWUgaXMgdG8gbW92ZSBzYmQtcHJvY2Vzc2VzIGZyb20gdGhlCiMgc2xpY2UgdGhleSB3ZXJlIG9yaWdpbmFsbHkgc3RhcnRlZCB0byByb290LXNsaWNlLgojIE9mIGNvdXJzZSBzdGFydGluZyBzYmQgaW4gYSBjZXJ0YWluIHNsaWNlIG1pZ2h0IGJlIGludGVudGlvbmFsLgojIFRodXMgaW4gYXV0by1tb2RlIHNiZCB3aWxsIGNoZWNrIGlmIHRoZSBzbGljZSBoYXMgUlQtYnVkZ2V0IGFzc2lnbmVkLgojIElmIHRoYXQgaXMgdGhlIGNhc2Ugc2JkIHdpbGwgc3RheSBpbiB0aGF0IHNsaWNlIHdoaWxlIGl0IHdpbGwKIyBiZSBtb3ZlZCB0byByb290LXNsaWNlIG90aGVyd2lzZS4KIwpTQkRfTU9WRV9UT19ST09UX0NHUk9VUD1hdXRvCgojIyBUeXBlOiB5ZXNubwojIyBEZWZhdWx0OiB5ZXMKIwojIElmIHJlc291cmNlIHN0YXJ0dXAgc3luY2luZyBpcyBlbmFibGVkIHRoZW4gcGFjZW1ha2VyZCBpcwojIGdvbm5hIHdhaXQgdG8gYmUgcGluZ2VkIHZpYSBJUEMgYmVmb3JlIGl0IHN0YXJ0cyByZXNvdXJjZXMuCiMgT24gc2h1dGRvd24gcGFjZW1ha2VyZCBpcyBnb2luZyB0byB3YWl0IGluIGEgc3RhdGUgd2hlcmUgaXQKIyBoYXMgY2xlYW5seSBzaHV0ZG93biByZXNvdXJjZXMgdGlsbCBzYmQgZmV0Y2hlcyB0aGF0IHN0YXRlLgojCiMgVGhlIGRlZmF1bHQgaXMgc2V0IHdoZW4gYnVpbGRpbmcgU0JEIGFuZCBQYWNlbWFrZXIgZnJvbSBzb3VyY2UuCiMgR29pbmcgZm9yICdubycgaXMgc2FmZXIgaWYgaXQgY2FuJ3QgYmUgYXNzdXJlZCB0aGF0IFNCRCBhbmQKIyBQYWNlbWFrZXIgaW5zdGFsbGVkIGRvIGJvdGggc3VwcG9ydCB0aGUgc3luY2hyb25pemF0aW9uIGZlYXR1cmUuCiMgV2hlbiBnb2luZyB3aXRoICd5ZXMnIC0gYWxzbyB1c2luZyBwYWNrYWdlIGRlcGVuZGVuY2llcyB0bwojIGFzc3VyZSBTQkQgJiBQYWNlbWFrZXIgYm90aCBzdXBwb3J0IHRoZSBzeW5jaHJvbml6YXRpb24KIyBmZWF0dXJlIGFuZCBhcmUgYXNzdW1pbmcgdGhlIHNhbWUgZGVmYXVsdCAtIGFuIFNCRCBjb25maWd1cmF0aW9uCiMgaW5oZXJpdGVkIHZpYSBhbiB1cGdyYWRlIGRvZXNuJ3QgaGF2ZSB0byBiZSBhbHRlcmVkIHRvIHN0aWxsCiMgYmVuZWZpdCBmcm9tIHRoZSBuZXcgZmVhdHVyZS4KIwpTQkRfU1lOQ19SRVNPVVJDRV9TVEFSVFVQPXllcwoKIyMgVHlwZTogc3RyaW5nCiMjIERlZmF1bHQ6ICIiCiMKIyBBZGRpdGlvbmFsIG9wdGlvbnMgZm9yIHN0YXJ0aW5nIHNiZAojClNCRF9PUFRTPSItbiBsb2NhbGhvc3QiCg==", "encoding": "base64", "source": "/etc/sysconfig/sbd" } TASK [Decode SBD config] ******************************************************* task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:95 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.224) 0:02:46.869 ******** ok: [sut] => { "ansible_facts": { "__test_sbd_config_lines": [ "#", "# Ansible managed", "#", "# system_role:ha_cluster", "", "", "", "## Type: string", "## Default: \"\"", "#", "# SBD_DEVICE specifies the devices to use for exchanging sbd messages", "# and to monitor. 
If specifying more than one path, use \";\" as", "# separator.", "#", "SBD_DEVICE=\"/dev/loop0\"", "", "## Type: yesno", "## Default: yes", "#", "# Whether to enable the pacemaker integration.", "#", "SBD_PACEMAKER=yes", "", "## Type: always / clean", "## Default: always", "#", "# Specify the start mode for sbd. Setting this to \"clean\" will only", "# allow sbd to start if it was not previously fenced. See the -S option", "# in the man page.", "#", "SBD_STARTMODE=\"clean\"", "", "## Type: yesno / integer", "## Default: no", "#", "# Whether to delay after starting sbd on boot for \"msgwait\" seconds.", "# This may be necessary if your cluster nodes reboot so fast that the", "# other nodes are still waiting in the fence acknowledgement phase.", "# This is an occasional issue with virtual machines.", "#", "# This can also be enabled by being set to a specific delay value, in", "# seconds. Sometimes a longer delay than the default, \"msgwait\", is", "# needed, for example in the cases where it's considered to be safer to", "# wait longer than:", "# corosync token timeout + consensus timeout + pcmk_delay_max + msgwait", "#", "# Be aware that the special value \"1\" means \"yes\" rather than \"1s\".", "#", "# Consider that you might have to adapt the startup-timeout accordingly", "# if the default isn't sufficient. (TimeoutStartSec for systemd)", "#", "# This option may be ignored at a later point, once pacemaker handles", "# this case better.", "#", "SBD_DELAY_START=\"2\"", "", "## Type: string", "## Default: /dev/watchdog", "#", "# Watchdog device to use. If set to /dev/null, no watchdog device will", "# be used.", "#", "SBD_WATCHDOG_DEV=\"/dev/null\"", "", "## Type: integer", "## Default: 5", "#", "# How long, in seconds, the watchdog will wait before panicking the", "# node if no-one tickles it.", "#", "# This depends mostly on your storage latency; the majority of devices", "# must be successfully read within this time, or else the node will", "# self-fence.", "#", "# If your sbd device(s) reside on a multipath setup or iSCSI, this", "# should be the time required to detect a path failure.", "#", "# Be aware that watchdog timeout set in the on-disk metadata takes", "# precedence.", "#", "SBD_WATCHDOG_TIMEOUT=\"10\"", "", "## Type: string", "## Default: \"flush,reboot\"", "#", "# Actions to be executed when the watchers don't timely report to the sbd", "# main process or one of the watchers detects that the main process has", "# died.", "#", "# Set timeout-action to comma-separated combination of", "# noflush|flush plus reboot|crashdump|off.", "# If just one of both is given the other stays at the default.", "#", "# This doesn't affect actions like off, crashdump, reboot explicitly", "# triggered via message slots.", "# And it does as well not configure the action a watchdog would", "# trigger should it run off (there is no generic interface).", "#", "SBD_TIMEOUT_ACTION=\"reboot,flush\"", "", "## Type: yesno / auto", "## Default: auto", "#", "# If CPUAccounting is enabled default is not to assign any RT-budget", "# to the system.slice which prevents sbd from running RR-scheduled.", "#", "# One way to escape that issue is to move sbd-processes from the", "# slice they were originally started to root-slice.", "# Of course starting sbd in a certain slice might be intentional.", "# Thus in auto-mode sbd will check if the slice has RT-budget assigned.", "# If that is the case sbd will stay in that slice while it will", "# be moved to root-slice otherwise.", "#", "SBD_MOVE_TO_ROOT_CGROUP=auto", 
"", "## Type: yesno", "## Default: yes", "#", "# If resource startup syncing is enabled then pacemakerd is", "# gonna wait to be pinged via IPC before it starts resources.", "# On shutdown pacemakerd is going to wait in a state where it", "# has cleanly shutdown resources till sbd fetches that state.", "#", "# The default is set when building SBD and Pacemaker from source.", "# Going for 'no' is safer if it can't be assured that SBD and", "# Pacemaker installed do both support the synchronization feature.", "# When going with 'yes' - also using package dependencies to", "# assure SBD & Pacemaker both support the synchronization", "# feature and are assuming the same default - an SBD configuration", "# inherited via an upgrade doesn't have to be altered to still", "# benefit from the new feature.", "#", "SBD_SYNC_RESOURCE_STARTUP=yes", "", "## Type: string", "## Default: \"\"", "#", "# Additional options for starting sbd", "#", "SBD_OPTS=\"-n localhost\"" ] }, "changed": false } TASK [Print SBD config lines] ************************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:99 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.024) 0:02:46.893 ******** ok: [sut] => { "__test_sbd_config_lines": [ "#", "# Ansible managed", "#", "# system_role:ha_cluster", "", "", "", "## Type: string", "## Default: \"\"", "#", "# SBD_DEVICE specifies the devices to use for exchanging sbd messages", "# and to monitor. If specifying more than one path, use \";\" as", "# separator.", "#", "SBD_DEVICE=\"/dev/loop0\"", "", "## Type: yesno", "## Default: yes", "#", "# Whether to enable the pacemaker integration.", "#", "SBD_PACEMAKER=yes", "", "## Type: always / clean", "## Default: always", "#", "# Specify the start mode for sbd. Setting this to \"clean\" will only", "# allow sbd to start if it was not previously fenced. See the -S option", "# in the man page.", "#", "SBD_STARTMODE=\"clean\"", "", "## Type: yesno / integer", "## Default: no", "#", "# Whether to delay after starting sbd on boot for \"msgwait\" seconds.", "# This may be necessary if your cluster nodes reboot so fast that the", "# other nodes are still waiting in the fence acknowledgement phase.", "# This is an occasional issue with virtual machines.", "#", "# This can also be enabled by being set to a specific delay value, in", "# seconds. Sometimes a longer delay than the default, \"msgwait\", is", "# needed, for example in the cases where it's considered to be safer to", "# wait longer than:", "# corosync token timeout + consensus timeout + pcmk_delay_max + msgwait", "#", "# Be aware that the special value \"1\" means \"yes\" rather than \"1s\".", "#", "# Consider that you might have to adapt the startup-timeout accordingly", "# if the default isn't sufficient. (TimeoutStartSec for systemd)", "#", "# This option may be ignored at a later point, once pacemaker handles", "# this case better.", "#", "SBD_DELAY_START=\"2\"", "", "## Type: string", "## Default: /dev/watchdog", "#", "# Watchdog device to use. 
If set to /dev/null, no watchdog device will", "# be used.", "#", "SBD_WATCHDOG_DEV=\"/dev/null\"", "", "## Type: integer", "## Default: 5", "#", "# How long, in seconds, the watchdog will wait before panicking the", "# node if no-one tickles it.", "#", "# This depends mostly on your storage latency; the majority of devices", "# must be successfully read within this time, or else the node will", "# self-fence.", "#", "# If your sbd device(s) reside on a multipath setup or iSCSI, this", "# should be the time required to detect a path failure.", "#", "# Be aware that watchdog timeout set in the on-disk metadata takes", "# precedence.", "#", "SBD_WATCHDOG_TIMEOUT=\"10\"", "", "## Type: string", "## Default: \"flush,reboot\"", "#", "# Actions to be executed when the watchers don't timely report to the sbd", "# main process or one of the watchers detects that the main process has", "# died.", "#", "# Set timeout-action to comma-separated combination of", "# noflush|flush plus reboot|crashdump|off.", "# If just one of both is given the other stays at the default.", "#", "# This doesn't affect actions like off, crashdump, reboot explicitly", "# triggered via message slots.", "# And it does as well not configure the action a watchdog would", "# trigger should it run off (there is no generic interface).", "#", "SBD_TIMEOUT_ACTION=\"reboot,flush\"", "", "## Type: yesno / auto", "## Default: auto", "#", "# If CPUAccounting is enabled default is not to assign any RT-budget", "# to the system.slice which prevents sbd from running RR-scheduled.", "#", "# One way to escape that issue is to move sbd-processes from the", "# slice they were originally started to root-slice.", "# Of course starting sbd in a certain slice might be intentional.", "# Thus in auto-mode sbd will check if the slice has RT-budget assigned.", "# If that is the case sbd will stay in that slice while it will", "# be moved to root-slice otherwise.", "#", "SBD_MOVE_TO_ROOT_CGROUP=auto", "", "## Type: yesno", "## Default: yes", "#", "# If resource startup syncing is enabled then pacemakerd is", "# gonna wait to be pinged via IPC before it starts resources.", "# On shutdown pacemakerd is going to wait in a state where it", "# has cleanly shutdown resources till sbd fetches that state.", "#", "# The default is set when building SBD and Pacemaker from source.", "# Going for 'no' is safer if it can't be assured that SBD and", "# Pacemaker installed do both support the synchronization feature.", "# When going with 'yes' - also using package dependencies to", "# assure SBD & Pacemaker both support the synchronization", "# feature and are assuming the same default - an SBD configuration", "# inherited via an upgrade doesn't have to be altered to still", "# benefit from the new feature.", "#", "SBD_SYNC_RESOURCE_STARTUP=yes", "", "## Type: string", "## Default: \"\"", "#", "# Additional options for starting sbd", "#", "SBD_OPTS=\"-n localhost\"" ] } TASK [Check SBD config] ******************************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:103 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.017) 0:02:46.910 ******** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check firewall and selinux state] **************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:116 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.017) 0:02:46.928 ******** included: 
/WORKDIR/git-weekly-ciglljd_4n/tests/tasks/check_firewall_selinux.yml for sut TASK [Check firewall service status] ******************************************* task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tasks/check_firewall_selinux.yml:6 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.025) 0:02:46.954 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [Check firewall port status] ********************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tasks/check_firewall_selinux.yml:12 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.021) 0:02:46.975 ******** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [Get associated selinux ports] ******************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tasks/check_firewall_selinux.yml:25 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.020) 0:02:46.995 ******** skipping: [sut] => { "changed": false, "false_condition": "ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [Check associated selinux ports] ****************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tasks/check_firewall_selinux.yml:33 Saturday 23 March 2024 12:06:24 +0000 (0:00:00.017) 0:02:47.012 ******** skipping: [sut] => { "changed": false, "false_condition": "ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [Check header for ansible_managed, fingerprint] *************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/template_sbd_all_options.yml:119 Saturday 23 March 2024 12:06:25 +0000 (0:00:00.016) 0:02:47.029 ******** included: /WORKDIR/git-weekly-ciglljd_4n/tests/tasks/check_header.yml for sut TASK [Get file] **************************************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tasks/check_header.yml:3 Saturday 23 March 2024 12:06:25 +0000 (0:00:00.025) 0:02:47.055 ******** skipping: [sut] => { "changed": false, "false_condition": "__file_content is not defined", "skip_reason": "Conditional result was False" } TASK [Check for presence of ansible managed header, fingerprint] *************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tasks/check_header.yml:9 Saturday 23 March 2024 12:06:25 +0000 (0:00:00.016) 0:02:47.071 ******** ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Unset SBD devices and watchdog variables] ******************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml:52 Saturday 23 March 2024 12:06:25 +0000 (0:00:00.044) 0:02:47.116 ******** ok: [sut] => { "ansible_facts": { "__test_node_options": null }, "changed": false } TASK [Clean up test environment for SBD] *************************************** task path: /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml:56 Saturday 23 March 2024 12:06:25 +0000 (0:00:00.014) 0:02:47.131 ******** TASK [fedora.linux_system_roles.ha_cluster : Unmount SBD devices] ************** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_sbd.yml:9 Saturday 23 March 2024 12:06:25 +0000 (0:00:00.029) 0:02:47.161 ******** changed: [sut] => { "changed": true, "cmd": [ 
"losetup", "-d", "/dev/loop0" ], "delta": "0:00:00.004064", "end": "2024-03-23 12:06:25.346529", "rc": 0, "start": "2024-03-23 12:06:25.342465" } TASK [fedora.linux_system_roles.ha_cluster : Delete backing files for SBD devices] *** task path: /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_sbd.yml:14 Saturday 23 March 2024 12:06:25 +0000 (0:00:00.236) 0:02:47.398 ******** ok: [sut] => { "changed": false, "path": "{'changed': True, 'path': '/tmp/ansible.y5zhny0c_ha_cluster_tests', 'uid': 0, 'gid': 0, 'owner': 'root', 'group': 'root', 'mode': '0600', 'state': 'file', 'secontext': 'unconfined_u:object_r:user_tmp_t:s0', 'size': 0, 'failed': False}", "state": "absent" } PLAY RECAP ********************************************************************* sut : ok=128 changed=41 unreachable=0 failed=0 skipped=80 rescued=0 ignored=0 Saturday 23 March 2024 12:06:25 +0000 (0:00:00.255) 0:02:47.653 ******** =============================================================================== fedora.linux_system_roles.ha_cluster : Install cluster packages -------- 60.91s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:44 fedora.linux_system_roles.ha_cluster : Wait for the cluster to fully start and form membership -- 26.69s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:75 fedora.linux_system_roles.ha_cluster : Install role essential packages -- 19.19s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11 fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot --- 7.00s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88 fedora.linux_system_roles.ha_cluster : Start pacemaker ------------------ 4.24s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:69 fedora.linux_system_roles.ha_cluster : Get services status - detect pacemaker --- 2.69s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:135 fedora.linux_system_roles.ha_cluster : Populate service facts ----------- 2.67s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:3 fedora.linux_system_roles.ha_cluster : Get services status - detect corosync-qdevice --- 2.60s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:21 fedora.linux_system_roles.ha_cluster : Get services status - detect SBD --- 2.60s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:16 fedora.linux_system_roles.ha_cluster : Remove qnetd configuration ------- 2.17s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-qnetd.yml:3 fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities ----------- 1.80s 
/WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:141 fedora.linux_system_roles.ha_cluster : Enable or disable configured cluster services on boot --- 1.74s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_common/cluster-enable-disable.yml:3 fedora.linux_system_roles.ha_cluster : Fetch pcsd capabilities ---------- 1.46s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:155 fedora.linux_system_roles.ha_cluster : Pcs auth using pcs-0.10 ---------- 1.43s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml:3 fedora.linux_system_roles.ha_cluster : Write CIB configuration ---------- 1.23s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/create-and-push-cib.yml:33 fedora.linux_system_roles.ha_cluster : Start corosync ------------------- 1.04s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-start-and-reload.yml:47 fedora.linux_system_roles.ha_cluster : Remove qdevice certificates [API] --- 1.04s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:46 fedora.linux_system_roles.ha_cluster : Set stonith-watchdog-timeout cluster property in CIB --- 1.02s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:142 Gathering Facts --------------------------------------------------------- 1.01s /WORKDIR/git-weekly-ciglljd_4n/tests/tests_sbd_all_options_play.yml:9 --------- fedora.linux_system_roles.ha_cluster : Check pcs auth status ------------ 1.00s /WORKDIR/git-weekly-ciglljd_4n/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml:3 ---^---^---^---^---^--- # STDERR: ---v---v---v---v---v--- [DEPRECATION WARNING]: ANSIBLE_COLLECTIONS_PATHS option, does not fit var naming standard, use the singular form ANSIBLE_COLLECTIONS_PATH instead. This feature will be removed from ansible-core in version 2.19. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. [DEPRECATION WARNING]: Encryption using the Python crypt module is deprecated. The Python crypt module is deprecated and will be removed from Python 3.13. Install the passlib library for continued encryption functionality. This feature will be removed in version 2.17. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. ---^---^---^---^---^---
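The Slurp SBD config file, Decode SBD config and Check SBD config tasks above follow a common verification pattern: fetch the remote file with the slurp module (which returns base64-encoded content), decode it into a list of lines, and assert that the expected settings appear as whole lines. The test tasks themselves are not reproduced in this log, so the sketch below is only an approximation of that pattern; the register variable name and the exact assertion list are assumptions, while the fact name __test_sbd_config_lines and the asserted values are taken from the output above.

- name: Slurp SBD config file
  slurp:
    src: /etc/sysconfig/sbd
  register: __sbd_config_slurped    # register name is an assumption

- name: Decode SBD config
  set_fact:
    __test_sbd_config_lines: "{{ (__sbd_config_slurped.content | b64decode).splitlines() }}"

- name: Check SBD config
  assert:
    that:
      - "'SBD_DEVICE=\"/dev/loop0\"' in __test_sbd_config_lines"
      - "'SBD_WATCHDOG_DEV=\"/dev/null\"' in __test_sbd_config_lines"
      - "'SBD_DELAY_START=\"2\"' in __test_sbd_config_lines"
      - "'SBD_OPTS=\"-n localhost\"' in __test_sbd_config_lines"

Matching whole lines rather than substrings keeps the assertions independent of the comment blocks that make up most of the generated file.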
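The values confirmed by those assertions (SBD_DEVICE="/dev/loop0", SBD_WATCHDOG_DEV="/dev/null", SBD_STARTMODE="clean", SBD_DELAY_START="2", SBD_WATCHDOG_TIMEOUT="10", SBD_TIMEOUT_ACTION="reboot,flush", SBD_OPTS="-n localhost") are the kind of settings the ha_cluster role writes to /etc/sysconfig/sbd when SBD is enabled with explicit options. The play below is a rough sketch of role input that could produce a similar file; the variable names follow the role's public documentation, but the concrete values and the node-level ha_cluster settings are illustrative assumptions, not a copy of this test's variables.

- hosts: all
  vars:
    ha_cluster_cluster_name: test-cluster                       # hypothetical cluster name
    ha_cluster_hacluster_password: "{{ vault_hacluster_pw }}"   # hypothetical vaulted variable
    ha_cluster_manage_firewall: false    # matches the skipped firewall checks in this run
    ha_cluster_sbd_enabled: true
    ha_cluster_sbd_options:
      - name: delay-start
        value: 2
      - name: startmode
        value: clean
      - name: timeout-action
        value: "reboot,flush"
      - name: watchdog-timeout
        value: 10
    ha_cluster:                          # node-level SBD settings; treat as an assumption
      sbd_watchdog: /dev/null
      sbd_devices:
        - /dev/loop0
  roles:
    - fedora.linux_system_roles.ha_cluster

For SBD_DELAY_START, the comment block in the slurped file notes that it can be safer to wait longer than corosync token timeout + consensus timeout + pcmk_delay_max + msgwait; with hypothetical values of 3 s, 3.6 s, 5 s and 10 s, that lower bound comes to about 21.6 s, so a delay of, say, 25 would satisfy it, and the same comments point out that systemd's TimeoutStartSec may then need to be raised accordingly.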
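The cleanup at the end of the play (losetup -d /dev/loop0 followed by removal of the /tmp/ansible.y5zhny0c_ha_cluster_tests backing file) implies the usual loop-device lifecycle around a throwaway backing file that stands in for a real shared SBD disk. A minimal sketch of both halves of that lifecycle follows; the task names, the register variable and the /tmp/sbd_backing_file path are illustrative, since only the detach half is visible in this part of the log.

- name: Create a small backing file to stand in for an SBD disk
  command: truncate -s 10M /tmp/sbd_backing_file    # hypothetical path
  args:
    creates: /tmp/sbd_backing_file

- name: Attach the backing file as a loop device
  command: losetup --find --show /tmp/sbd_backing_file
  register: __sbd_loop_device
  changed_when: true

- name: Report the device path that can be handed to SBD
  debug:
    msg: "SBD test device: {{ __sbd_loop_device.stdout }}"

- name: Detach the loop device again (the cleanup step seen above)
  command: losetup -d {{ __sbd_loop_device.stdout }}
  changed_when: true

- name: Delete the backing file
  file:
    path: /tmp/sbd_backing_file
    state: absent

losetup --find --show prints the loop device it allocates (for example /dev/loop0), which is why the detach step in the log can refer to /dev/loop0 directly.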