# STDOUT: ---v---v---v---v---v---
ansible-playbook [core 2.16.0]
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /opt/ansible-2.16/lib/python3.11/site-packages/ansible
  ansible collection location = /WORKDIR/git-suseibi8cjjt/.collection
  executable location = /opt/ansible-2.16/bin/ansible-playbook
  python version = 3.11.5 (main, Sep 7 2023, 00:00:00) [GCC 11.4.1 20230605 (Red Hat 11.4.1-2)] (/opt/ansible-2.16/bin/python)
  jinja version = 3.1.2
  libyaml = True
Using /etc/ansible/ansible.cfg as config file
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_qdevice_minimal.yml ********************************************
2 plays in /WORKDIR/git-suseibi8cjjt/tests/tests_qdevice_minimal.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/tests_qdevice_minimal.yml:4
Thursday 08 February 2024 19:41:53 +0000 (0:00:00.022) 0:00:00.022 *****
ok: [sut] => { "ansible_facts": { "ha_cluster_hacluster_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n31303833633366333561656439323930303361333161363239346166656537323933313436\n3432386236656563343237306335323637396239616230353561330a313731623238393238\n62343064666336643930663239383936616465643134646536656532323461356237646133\n3761616633323839633232353637366266350a313163633236376666653238633435306565\n3264623032333736393535663833\n" } }, "ansible_included_var_files": [ "/WORKDIR/git-suseibi8cjjt/tests/vars/vault-variables.yml" ], "changed": false }
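The "ha_cluster_hacluster_password" fact above is an inline vault-encrypted value loaded from vars/vault-variables.yml. A minimal sketch of how such a file is typically produced; the plaintext and the vault password file are placeholders, not values from this run:

    # vars/vault-variables.yml -- shape of the file included above (hypothetical).
    # An encrypted value like this is generated with, for example:
    #   ansible-vault encrypt_string --vault-password-file <pwfile> \
    #       '<plaintext>' --name 'ha_cluster_hacluster_password'
    ha_cluster_hacluster_password: !vault |
        $ANSIBLE_VAULT;1.1;AES256
        31303833...   # ciphertext lines as emitted by ansible-vault

Ansible decrypts the value transparently at runtime using the configured vault password, which is why the include task reports "ok" without ever logging the plaintext.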
PLAY [Test qdevice - minimal configuration] ************************************

TASK [Gathering Facts] *********************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/tests_qdevice_minimal.yml:9
Thursday 08 February 2024 19:41:53 +0000 (0:00:00.013) 0:00:00.035 *****
ok: [sut]

TASK [Set qnetd address] *******************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/tests_qdevice_minimal.yml:14
Thursday 08 February 2024 19:41:53 +0000 (0:00:00.770) 0:00:00.806 *****
ok: [sut] => { "ansible_facts": { "__test_qnetd_address": "localhost" }, "changed": false }

TASK [Run test] ****************************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/tests_qdevice_minimal.yml:18
Thursday 08 February 2024 19:41:53 +0000 (0:00:00.018) 0:00:00.825 *****
included: /WORKDIR/git-suseibi8cjjt/tests/template_qdevice.yml for sut

TASK [Set up test environment] *************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/template_qdevice.yml:7
Thursday 08 February 2024 19:41:53 +0000 (0:00:00.013) 0:00:00.838 *****

TASK [fedora.linux_system_roles.ha_cluster : Set node name to 'localhost' for single-node clusters] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:9
Thursday 08 February 2024 19:41:53 +0000 (0:00:00.019) 0:00:00.858 *****
ok: [sut] => { "ansible_facts": { "inventory_hostname": "localhost" }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Ensure facts used by tests] *******
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:14
Thursday 08 February 2024 19:41:53 +0000 (0:00:00.019) 0:00:00.878 *****
skipping: [sut] => { "changed": false, "false_condition": "'distribution' not in ansible_facts", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:22
Thursday 08 February 2024 19:41:53 +0000 (0:00:00.009) 0:00:00.887 *****
ok: [sut] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:27
Thursday 08 February 2024 19:41:54 +0000 (0:00:00.244) 0:00:01.131 *****
ok: [sut] => { "ansible_facts": { "__ha_cluster_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Do not try to enable RHEL repositories] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:32
Thursday 08 February 2024 19:41:54 +0000 (0:00:00.014) 0:00:01.146 *****
skipping: [sut] => { "changed": false, "false_condition": "ansible_distribution == 'RedHat'", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Copy nss-altfiles ha_cluster users to /etc/passwd] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup.yml:41
Thursday 08 February 2024 19:41:54 +0000 (0:00:00.008) 0:00:01.154 *****
skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_is_ostree | d(false)", "skip_reason": "Conditional result was False" }

TASK [Clean up test environment for qnetd] *************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/template_qdevice.yml:12
Thursday 08 February 2024 19:41:54 +0000 (0:00:00.018) 0:00:01.173 *****

TASK [fedora.linux_system_roles.ha_cluster : Make sure qnetd is not installed] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:9
Thursday 08 February 2024 19:41:54 +0000 (0:00:00.020) 0:00:01.193 *****
ok: [sut] => { "changed": false, "rc": 0, "results": [] }

MSG:

Nothing to do

TASK [fedora.linux_system_roles.ha_cluster : Make sure qnetd config files are not present] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:16
Thursday 08 February 2024 19:41:56 +0000 (0:00:02.560) 0:00:03.754 *****
ok: [sut] => { "changed": false, "path": "/etc/corosync/qnetd", "state": "absent" }

TASK [Set up test environment for qnetd] ***************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/template_qdevice.yml:17
Thursday 08 February 2024 19:41:56 +0000 (0:00:00.259) 0:00:04.013 *****

TASK [fedora.linux_system_roles.ha_cluster : Install qnetd packages] ***********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup_qnetd.yml:9
Thursday 08 February 2024 19:41:57 +0000 (0:00:00.021) 0:00:04.034 *****
changed: [sut] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: libknet1-compress-plugins-all-1.28-1.el8.x86_64",
        "Installed: perl-TimeDate-1:2.30-15.module_el8+336+32327ac4.noarch",
        "Installed: lvm2-libs-8:2.03.14-9.el8.x86_64",
        "Installed: libknet1-compress-zlib-plugin-1.28-1.el8.x86_64",
        "Installed: libknet1-crypto-nss-plugin-1.28-1.el8.x86_64",
        "Installed: net-snmp-libs-1:5.8-28.el8.x86_64",
        "Installed: net-tools-2.0-0.52.20160912git.el8.x86_64",
        "Installed: libknet1-crypto-openssl-plugin-1.28-1.el8.x86_64",
        "Installed: libknet1-compress-lzma-plugin-1.28-1.el8.x86_64",
        "Installed: device-mapper-event-8:1.02.181-9.el8.x86_64",
        "Installed: overpass-fonts-3.0.2-3.el8.noarch",
        "Installed: libknet1-crypto-plugins-all-1.28-1.el8.x86_64",
        "Installed: libknet1-plugins-all-1.28-1.el8.x86_64",
        "Installed: ruby-irb-2.5.9-111.module_el8+475+35a6c697.noarch",
        "Installed: device-mapper-event-libs-8:1.02.181-9.el8.x86_64",
        "Installed: python3-pyparsing-2.1.10-7.el8.noarch",
        "Installed: rubygem-did_you_mean-1.2.0-111.module_el8+475+35a6c697.noarch",
        "Installed: libwbclient-4.19.4-3.el8.x86_64",
        "Installed: pacemaker-cluster-libs-2.1.7-3.el8.x86_64",
        "Installed: libnozzle1-1.28-1.el8.x86_64",
        "Installed: pacemaker-2.1.7-3.el8.x86_64",
        "Installed: centos-logos-85.8-2.el8.x86_64",
        "Installed: rubygem-rdoc-6.0.1.1-111.module_el8+475+35a6c697.noarch",
        "Installed: ruby-libs-2.5.9-111.module_el8+475+35a6c697.x86_64",
        "Installed: clufter-bin-0.77.1-5.el8.x86_64",
        "Installed: pacemaker-libs-2.1.7-3.el8.x86_64",
        "Installed: rubygem-io-console-0.4.6-111.module_el8+475+35a6c697.x86_64",
        "Installed: clufter-common-0.77.1-5.el8.noarch",
        "Installed: cifs-utils-7.0-1.el8.x86_64",
        "Installed: pacemaker-cli-2.1.7-3.el8.x86_64",
        "Installed: pacemaker-schemas-2.1.7-3.el8.noarch",
        "Installed: corosync-3.1.8-1.el8.x86_64",
        "Installed: nss-tools-3.90.0-4.el8.x86_64",
        "Installed: liberation-fonts-common-1:2.00.3-7.el8.noarch",
        "Installed: corosync-qnetd-3.0.2-2.el8.x86_64",
        "Installed: device-mapper-persistent-data-0.9.0-7.el8.x86_64",
        "Installed: python3-pycurl-7.43.0.2-4.el8.x86_64",
        "Installed: liberation-sans-fonts-1:2.00.3-7.el8.noarch",
        "Installed: ruby-2.5.9-111.module_el8+475+35a6c697.x86_64",
        "Installed: rubygem-openssl-2.1.2-111.module_el8+475+35a6c697.x86_64",
        "Installed: libaio-0.3.112-1.el8.x86_64",
        "Installed: corosynclib-3.1.8-1.el8.x86_64",
        "Installed: samba-client-libs-4.19.4-3.el8.x86_64",
        "Installed: libqb-1.0.3-13.el8.x86_64",
        "Installed: samba-common-4.19.4-3.el8.noarch",
        "Installed: pcs-0.10.18-1.el8.x86_64",
        "Installed: samba-common-libs-4.19.4-3.el8.x86_64",
        "Installed: python3-clufter-0.77.1-5.el8.noarch",
        "Installed: rubygem-json-2.1.0-111.module_el8+475+35a6c697.x86_64",
        "Installed: fontpackages-filesystem-1.44-22.el8.noarch",
        "Installed: rubygem-bigdecimal-1.3.4-111.module_el8+475+35a6c697.x86_64",
        "Installed: rubygems-2.7.6.3-111.module_el8+475+35a6c697.noarch",
        "Installed: libicu-60.3-2.el8_1.x86_64",
        "Installed: libknet1-1.28-1.el8.x86_64",
        "Installed: resource-agents-4.9.0-53.el8.x86_64",
        "Installed: libknet1-compress-bzip2-plugin-1.28-1.el8.x86_64",
        "Installed: libknet1-compress-lz4-plugin-1.28-1.el8.x86_64",
        "Installed: lvm2-8:2.03.14-9.el8.x86_64",
        "Installed: rubygem-psych-3.0.2-111.module_el8+475+35a6c697.x86_64",
        "Installed: libknet1-compress-lzo2-plugin-1.28-1.el8.x86_64",
        "Installed: bzip2-1.0.6-26.el8.x86_64"
    ]
}

TASK [fedora.linux_system_roles.ha_cluster : Set up qnetd] *********************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup_qnetd.yml:17
Thursday 08 February 2024 19:42:16 +0000 (0:00:19.148) 0:00:23.182 *****
changed: [sut] => { "changed": true, "cmd": [ "pcs", "--start", "--", "qdevice", "setup", "model", "net" ], "delta": "0:00:02.026734", "end": "2024-02-08 19:42:18.426308", "failed_when_result": false, "rc": 0, "start": "2024-02-08 19:42:16.399574" }

STDOUT:

Quorum device 'net' initialized
Starting quorum device...
quorum device started
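The "Set up qnetd" task above shells out to pcs to initialize and start the quorum device. A minimal sketch of a task that would produce this result, reconstructed from the logged module arguments (the "creates" guard and the failed_when handling are assumptions; the role's actual task may differ):

    # Hypothetical reconstruction of tasks/test_setup_qnetd.yml:17
    - name: Set up qnetd
      command:
        cmd: pcs --start -- qdevice setup model net
        creates: /etc/corosync/qnetd/nssdb   # assumed idempotency guard
      register: __qnetd_setup
      failed_when: false   # consistent with the logged "failed_when_result": false

The "--" ends pcs option parsing so that "qdevice setup model net" is read as the subcommand, and "--start" starts corosync-qnetd once its NSS certificate database has been initialized, matching the STDOUT above.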
TASK [Back up qnetd] ***********************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/template_qdevice.yml:23
Thursday 08 February 2024 19:42:18 +0000 (0:00:02.289) 0:00:25.472 *****
included: /WORKDIR/git-suseibi8cjjt/tests/tasks/qnetd_backup_restore.yml for sut

TASK [Create /etc/corosync/qnetd_backup directory] *****************************
task path: /WORKDIR/git-suseibi8cjjt/tests/tasks/qnetd_backup_restore.yml:6
Thursday 08 February 2024 19:42:18 +0000 (0:00:00.015) 0:00:25.487 *****
changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/corosync/qnetd_backup", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 }

TASK [Back up qnetd settings] **************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/tasks/qnetd_backup_restore.yml:13
Thursday 08 February 2024 19:42:18 +0000 (0:00:00.279) 0:00:25.767 *****
changed: [sut] => { "changed": true, "cmd": [ "cp", "--preserve=all", "--recursive", "/etc/corosync/qnetd", "/etc/corosync/qnetd_backup" ], "delta": "0:00:00.003499", "end": "2024-02-08 19:42:18.958049", "rc": 0, "start": "2024-02-08 19:42:18.954550" }

TASK [Restore qnetd settings] **************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/tasks/qnetd_backup_restore.yml:23
Thursday 08 February 2024 19:42:18 +0000 (0:00:00.224) 0:00:25.991 *****
skipping: [sut] => { "changed": false, "false_condition": "operation == \"restore\"", "skip_reason": "Conditional result was False" }

TASK [Start qnetd] *************************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/tasks/qnetd_backup_restore.yml:30
Thursday 08 February 2024 19:42:18 +0000 (0:00:00.009) 0:00:26.000 *****
skipping: [sut] => { "changed": false, "false_condition": "operation == \"restore\"", "skip_reason": "Conditional result was False" }

TASK [Run HA Cluster role] *****************************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/template_qdevice.yml:29
Thursday 08 February 2024 19:42:18 +0000 (0:00:00.008) 0:00:26.008 *****

TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:3
Thursday 08 February 2024 19:42:19 +0000 (0:00:00.036) 0:00:26.045 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Ensure ansible_facts used by role] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:2
Thursday 08 February 2024 19:42:19 +0000 (0:00:00.013) 0:00:26.058 *****
skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Check if system is ostree] ********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:10
Thursday 08 February 2024 19:42:19 +0000 (0:00:00.018) 0:00:26.077 *****
skipping: [sut] => { "changed": false, "false_condition": "not __ha_cluster_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Set flag to indicate system is ostree] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:15
Thursday 08 February 2024 19:42:19 +0000 (0:00:00.011) 0:00:26.089 *****
skipping: [sut] => { "changed": false, "false_condition": "not __ha_cluster_is_ostree is defined", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Set platform/version specific variables] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/set_vars.yml:19
Thursday 08 February 2024 19:42:19 +0000 (0:00:00.011) 0:00:26.100 *****
ok: [sut] => (item=RedHat.yml) => { "ansible_facts": { "__ha_cluster_fence_agent_packages": "{{ ['fence-agents-all'] + (['fence-virt'] if ansible_architecture == 'x86_64' else []) }}", "__ha_cluster_fullstack_node_packages": [ "corosync", "libknet1-plugins-all", "resource-agents", "pacemaker", "openssl" ], "__ha_cluster_pcs_provider": "pcs-0.10", "__ha_cluster_qdevice_node_packages": [ "corosync-qdevice", "bash", "coreutils", "curl", "grep", "nss-tools", "openssl", "sed" ], "__ha_cluster_repos": [], "__ha_cluster_role_essential_packages": [ "pcs", "corosync-qnetd" ], "__ha_cluster_sbd_packages": [ "sbd" ], "__ha_cluster_services": [ "corosync", "corosync-qdevice", "pacemaker" ] }, "ansible_included_var_files": [ "/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" }
skipping: [sut] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__ha_cluster_repos": [ { "id": "ha", "name": "HighAvailability" }, { "id": "resilientstorage", "name": "ResilientStorage" } ] }, "ansible_included_var_files": [ "/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" }
ok: [sut] => (item=CentOS_8.yml) => { "ansible_facts": { "__ha_cluster_repos": [ { "id": "ha", "name": "HighAvailability" }, { "id": "resilientstorage", "name": "ResilientStorage" } ] }, "ansible_included_var_files": [ "/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" }
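The item sequence just above (RedHat.yml loaded, CentOS.yml skipped, CentOS_8.yml loaded twice) is the usual linux-system-roles variable-layering pattern: vars files are tried from most generic to most specific and missing files are skipped. A minimal sketch of the pattern, inferred from the logged "false_condition" of "__vars_file is file" (the real task in set_vars.yml may differ in detail):

    # Hypothetical sketch of the layered include_vars loop.
    - name: Set platform/version specific variables
      include_vars: "{{ __vars_file }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"          # RedHat.yml
        - "{{ ansible_facts['distribution'] }}.yml"       # CentOS.yml
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file

On CentOS Stream 8 the major version and the full version both render as "8", so the last two loop items both resolve to CentOS_8.yml; that is why the same file legitimately appears twice in this log.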
{}, "ansible_included_var_files": [ "/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/vars/shell_pcs.yml" ], "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Enable package repositories] ****** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:6 Thursday 08 February 2024 19:42:19 +0000 (0:00:00.011) 0:00:26.141 ***** included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Find platform/version specific tasks to enable repositories] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:3 Thursday 08 February 2024 19:42:19 +0000 (0:00:00.017) 0:00:26.158 ***** ok: [sut] => (item=RedHat.yml) => { "ansible_facts": { "__ha_cluster_enable_repo_tasks_file": "/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/RedHat.yml" }, "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } ok: [sut] => (item=CentOS.yml) => { "ansible_facts": { "__ha_cluster_enable_repo_tasks_file": "/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml" }, "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml" } skipping: [sut] => (item=CentOS_8.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file", "item": "CentOS_8.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=CentOS_8.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__ha_cluster_enable_repo_tasks_file_candidate is file", "item": "CentOS_8.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Run platform/version specific tasks to enable repositories] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-package-repositories.yml:21 Thursday 08 February 2024 19:42:19 +0000 (0:00:00.028) 0:00:26.187 ***** included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml for sut TASK [fedora.linux_system_roles.ha_cluster : List active CentOS repositories] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml:3 Thursday 08 February 2024 19:42:19 +0000 (0:00:00.025) 0:00:26.212 ***** ok: [sut] => { "changed": false, "cmd": [ "dnf", "repolist" ], "delta": "0:00:00.238520", "end": "2024-02-08 19:42:19.600926", "rc": 0, "start": "2024-02-08 19:42:19.362406" } STDOUT: repo id repo name appstream CentOS Stream 8 - AppStream baseos CentOS Stream 8 - BaseOS beaker-client Beaker Client - RedHatEnterpriseLinux8 beaker-harness Beaker harness beaker-tasks Beaker tasks beakerlib-libraries Copr repo for beakerlib-libraries owned by bgoncalv copr:copr.devel.redhat.com:lpol:qa-tools Copr repo for qa-tools owned by lpol extras CentOS Stream 8 - Extras extras-common CentOS Stream 8 - Extras common packages ha CentOS Stream 8 - HighAvailability TASK [fedora.linux_system_roles.ha_cluster : Enable CentOS 
TASK [fedora.linux_system_roles.ha_cluster : Enable CentOS repositories] *******
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/enable-repositories/CentOS.yml:10
Thursday 08 February 2024 19:42:19 +0000 (0:00:00.423) 0:00:26.636 *****
skipping: [sut] => (item={'id': 'ha', 'name': 'HighAvailability'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item.id not in __ha_cluster_repolist.stdout", "item": { "id": "ha", "name": "HighAvailability" }, "skip_reason": "Conditional result was False" }
skipping: [sut] => (item={'id': 'resilientstorage', 'name': 'ResilientStorage'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item.name != \"ResilientStorage\" or ha_cluster_enable_repos_resilient_storage", "item": { "id": "resilientstorage", "name": "ResilientStorage" }, "skip_reason": "Conditional result was False" }
skipping: [sut] => { "changed": false }

MSG:

All items skipped
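Both repository items are skipped above because the "ha" repo already appears in the dnf repolist output and ResilientStorage enabling is off by default. A sketch of the guarding logic, reconstructed from the logged conditions; the mechanism used to enable a repo here (dnf config-manager) is an assumption, the role may use a different module:

    # Hypothetical sketch of tasks/enable-repositories/CentOS.yml:10
    - name: Enable CentOS repositories
      command: dnf config-manager --set-enabled {{ item.id }}   # assumed mechanism
      loop: "{{ __ha_cluster_repos }}"
      when:
        - item.id not in __ha_cluster_repolist.stdout   # skip repos already active
        - item.name != "ResilientStorage" or ha_cluster_enable_repos_resilient_storage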
TASK [fedora.linux_system_roles.ha_cluster : Install role essential packages] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11
Thursday 08 February 2024 19:42:19 +0000 (0:00:00.013) 0:00:26.649 *****
ok: [sut] => { "changed": false, "rc": 0, "results": [] }

MSG:

Nothing to do

TASK [fedora.linux_system_roles.ha_cluster : Check and prepare role variables] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:17
Thursday 08 February 2024 19:42:22 +0000 (0:00:02.469) 0:00:29.118 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Fail if passwords are not specified] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:5
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.021) 0:00:29.140 *****
skipping: [sut] => (item=ha_cluster_hacluster_password) => { "ansible_loop_var": "item", "changed": false, "false_condition": "lookup(\"vars\", item, default=\"\") | string | length < 1", "item": "ha_cluster_hacluster_password", "skip_reason": "Conditional result was False" }
skipping: [sut] => { "changed": false }

MSG:

All items skipped

TASK [fedora.linux_system_roles.ha_cluster : Fail if nodes do not have the same number of SBD devices specified] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:15
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.030) 0:00:29.170 *****
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_sbd_enabled", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Fail if configuring qnetd on a cluster node] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:30
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.009) 0:00:29.180 *****
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_qnetd.present | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Fail if no valid level is specified for a fencing level] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:40
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.018) 0:00:29.198 *****
skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.ha_cluster : Fail if no target is specified for a fencing level] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:48
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.008) 0:00:29.206 *****
skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.ha_cluster : Discover cluster node names] ******
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:62
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.008) 0:00:29.215 *****
ok: [sut] => { "ansible_facts": { "__ha_cluster_node_name": "localhost" }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Collect cluster node names] *******
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:66
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.020) 0:00:29.235 *****
ok: [sut] => { "ansible_facts": { "__ha_cluster_all_node_names": [ "localhost" ] }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Extract qdevice settings] *********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:70
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.022) 0:00:29.258 *****
ok: [sut] => { "ansible_facts": { "__ha_cluster_qdevice_host": "localhost", "__ha_cluster_qdevice_in_use": true, "__ha_cluster_qdevice_model": "net", "__ha_cluster_qdevice_pcs_address": "" }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Figure out if ATB needs to be enabled for SBD] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:79
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.027) 0:00:29.286 *****
ok: [sut] => { "ansible_facts": { "__ha_cluster_sbd_needs_atb": false }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Fail if SBD needs ATB enabled and the user configured ATB to be disabled] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:89
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.023) 0:00:29.309 *****
skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_sbd_needs_atb | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Fail if ha_cluster_pcsd_public_key_src and ha_cluster_pcsd_private_key_src are set along with ha_cluster_pcsd_certificates] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:96
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.017) 0:00:29.327 *****
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is not none", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities] ***********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:110
Thursday 08 February 2024 19:42:22 +0000 (0:00:00.011) 0:00:29.339 *****
ok: [sut] => { "changed": false, "cmd": [ "pcs", "--version", "--full" ], "delta": "0:00:01.547198", "end": "2024-02-08 19:42:24.035211", "rc": 0, "start": "2024-02-08 19:42:22.488013" }

STDOUT:

0.10.18
booth booth.enable-authfile.set booth.enable-authfile.unset cluster.config.backup-local cluster.config.restore-cluster cluster.config.restore-local cluster.config.uuid cluster.create cluster.create.enable cluster.create.local cluster.create.no-keys-sync cluster.create.separated-name-and-address cluster.create.start cluster.create.start.wait cluster.create.transport.knet cluster.create.transport.udp-udpu cluster.create.transport.udp-udpu.no-rrp cluster.destroy cluster.destroy.all cluster.report cluster.verify corosync.authkey.update corosync.config.get corosync.config.get.struct corosync.config.reload corosync.config.sync-to-local-cluster corosync.config.update corosync.link.add corosync.link.remove corosync.link.remove.list corosync.link.update corosync.qdevice corosync.qdevice.model.net corosync.quorum corosync.quorum.device corosync.quorum.device.heuristics corosync.quorum.device.model.net corosync.quorum.device.model.net.options_tls_and_kaptb corosync.quorum.set-expected-votes-runtime corosync.quorum.status corosync.quorum.unblock corosync.totem.block_unlisted_ips corosync.uidgid node.add node.add.enable node.add.separated-name-and-address node.add.start node.add.start.wait node.attributes node.attributes.set-list-for-node node.confirm-off node.fence node.guest node.kill node.maintenance node.maintenance.all node.maintenance.list node.maintenance.wait node.remote node.remote.onfail-demote node.remove node.remove-from-caches node.remove.list node.standby node.standby.all node.standby.list node.standby.wait node.start-stop-enable-disable node.start-stop-enable-disable.all node.start-stop-enable-disable.list node.start-stop-enable-disable.start-wait node.utilization node.utilization.set-list-for-node pcmk.acl.enable-disable pcmk.acl.group pcmk.acl.role pcmk.acl.role.create-with-permissions pcmk.acl.role.delete-with-users-groups pcmk.acl.user pcmk.alert pcmk.cib.checkpoints pcmk.cib.checkpoints.diff pcmk.cib.edit pcmk.cib.get pcmk.cib.get.scope pcmk.cib.roles.promoted-unpromoted pcmk.cib.set pcmk.constraint.colocation.set pcmk.constraint.colocation.set.options pcmk.constraint.colocation.simple pcmk.constraint.colocation.simple.options pcmk.constraint.hide-expired pcmk.constraint.location.simple pcmk.constraint.location.simple.options pcmk.constraint.location.simple.resource-regexp pcmk.constraint.location.simple.rule pcmk.constraint.location.simple.rule.node-attr-type-number pcmk.constraint.location.simple.rule.options pcmk.constraint.location.simple.rule.rule-add-remove pcmk.constraint.no-autocorrect pcmk.constraint.order.set pcmk.constraint.order.set.options pcmk.constraint.order.simple pcmk.constraint.order.simple.options pcmk.constraint.ticket.set pcmk.constraint.ticket.set.options pcmk.constraint.ticket.simple pcmk.constraint.ticket.simple.constraint-id pcmk.properties.cluster pcmk.properties.cluster.config.output-formats pcmk.properties.cluster.defaults pcmk.properties.cluster.describe pcmk.properties.cluster.describe.output-formats pcmk.properties.operation-defaults pcmk.properties.operation-defaults.multiple pcmk.properties.operation-defaults.rule pcmk.properties.operation-defaults.rule-rsc-op pcmk.properties.operation-defaults.rule.hide-expired pcmk.properties.operation-defaults.rule.node-attr-type-number pcmk.properties.resource-defaults pcmk.properties.resource-defaults.multiple pcmk.properties.resource-defaults.rule pcmk.properties.resource-defaults.rule-rsc-op pcmk.properties.resource-defaults.rule.hide-expired pcmk.properties.resource-defaults.rule.node-attr-type-number pcmk.resource.ban-move-clear pcmk.resource.ban-move-clear.bundles pcmk.resource.ban-move-clear.clear-expired pcmk.resource.ban-move-clear.clone pcmk.resource.bundle pcmk.resource.bundle.container-docker pcmk.resource.bundle.container-docker.promoted-max pcmk.resource.bundle.container-podman pcmk.resource.bundle.container-podman.promoted-max pcmk.resource.bundle.container-rkt pcmk.resource.bundle.container-rkt.promoted-max pcmk.resource.bundle.reset pcmk.resource.bundle.wait pcmk.resource.cleanup pcmk.resource.cleanup.one-resource pcmk.resource.cleanup.strict pcmk.resource.clone pcmk.resource.clone.custom-id pcmk.resource.clone.meta-in-create pcmk.resource.clone.wait pcmk.resource.config.output-formats pcmk.resource.create pcmk.resource.create.clone.custom-id pcmk.resource.create.in-existing-bundle pcmk.resource.create.meta pcmk.resource.create.no-master pcmk.resource.create.operations pcmk.resource.create.operations.onfail-demote pcmk.resource.create.promotable pcmk.resource.create.promotable.custom-id pcmk.resource.create.wait pcmk.resource.debug pcmk.resource.delete pcmk.resource.disable.safe pcmk.resource.disable.safe.brief pcmk.resource.disable.safe.tag pcmk.resource.disable.simulate pcmk.resource.disable.simulate.brief pcmk.resource.disable.simulate.tag pcmk.resource.enable-disable pcmk.resource.enable-disable.list pcmk.resource.enable-disable.tag pcmk.resource.enable-disable.wait pcmk.resource.failcount pcmk.resource.group pcmk.resource.group.add-remove-list pcmk.resource.group.wait pcmk.resource.manage-unmanage pcmk.resource.manage-unmanage.list pcmk.resource.manage-unmanage.tag pcmk.resource.manage-unmanage.with-monitor pcmk.resource.move.autoclean pcmk.resource.promotable pcmk.resource.promotable.custom-id pcmk.resource.promotable.meta-in-create pcmk.resource.promotable.wait pcmk.resource.refresh pcmk.resource.refresh.one-resource pcmk.resource.refresh.strict pcmk.resource.relations pcmk.resource.relocate pcmk.resource.restart pcmk.resource.update pcmk.resource.update-meta pcmk.resource.update-meta.list pcmk.resource.update-meta.wait pcmk.resource.update-operations pcmk.resource.update-operations.onfail-demote pcmk.resource.update.meta pcmk.resource.update.operations pcmk.resource.update.operations.onfail-demote pcmk.resource.update.wait pcmk.resource.utilization pcmk.resource.utilization-set-list-for-resource pcmk.stonith.cleanup pcmk.stonith.cleanup.one-resource pcmk.stonith.cleanup.strict pcmk.stonith.create pcmk.stonith.create.in-group pcmk.stonith.create.meta pcmk.stonith.create.operations pcmk.stonith.create.operations.onfail-demote pcmk.stonith.create.wait pcmk.stonith.delete pcmk.stonith.enable-disable pcmk.stonith.enable-disable.list pcmk.stonith.enable-disable.wait pcmk.stonith.history.cleanup pcmk.stonith.history.show pcmk.stonith.history.update pcmk.stonith.levels pcmk.stonith.levels.add-remove-devices-list pcmk.stonith.levels.clear pcmk.stonith.levels.node-attr pcmk.stonith.levels.node-regexp pcmk.stonith.levels.verify pcmk.stonith.refresh pcmk.stonith.refresh.one-resource pcmk.stonith.refresh.strict pcmk.stonith.update pcmk.stonith.update.scsi-devices pcmk.stonith.update.scsi-devices.add-remove pcmk.stonith.update.scsi-devices.mpath pcmk.tag pcmk.tag.resources pcs.auth.client pcs.auth.client.cluster pcs.auth.client.token pcs.auth.deauth-client pcs.auth.deauth-server pcs.auth.no-bidirectional pcs.auth.separated-name-and-address pcs.auth.server.token pcs.cfg-in-file.cib pcs.daemon-ssl-cert.set pcs.daemon-ssl-cert.sync-to-local-cluster pcs.disaster-recovery.essentials pcs.request-timeout resource-agents.describe resource-agents.list resource-agents.list.detailed resource-agents.ocf.version-1-0 resource-agents.ocf.version-1-1 resource-agents.self-validation sbd sbd.option-timeout-action sbd.shared-block-device status.corosync.membership status.pcmk.resources.hide-inactive status.pcmk.resources.id status.pcmk.resources.node status.pcmk.resources.orphaned status.pcmk.xml stonith-agents.describe stonith-agents.list stonith-agents.list.detailed stonith-agents.ocf.version-1-0 stonith-agents.ocf.version-1-1 stonith-agents.self-validation

TASK [fedora.linux_system_roles.ha_cluster : Parse pcs capabilities] ***********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:117
Thursday 08 February 2024 19:42:24 +0000 (0:00:01.732) 0:00:31.071 *****
ok: [sut] => { "ansible_facts": { "__ha_cluster_pcs_capabilities": [ "booth", "booth.enable-authfile.set", "booth.enable-authfile.unset", "cluster.config.backup-local", "cluster.config.restore-cluster", "cluster.config.restore-local", "cluster.config.uuid", "cluster.create", "cluster.create.enable", "cluster.create.local", "cluster.create.no-keys-sync", "cluster.create.separated-name-and-address", "cluster.create.start", "cluster.create.start.wait", "cluster.create.transport.knet", "cluster.create.transport.udp-udpu", "cluster.create.transport.udp-udpu.no-rrp", "cluster.destroy", "cluster.destroy.all", "cluster.report", "cluster.verify", "corosync.authkey.update", "corosync.config.get", "corosync.config.get.struct", "corosync.config.reload", "corosync.config.sync-to-local-cluster", "corosync.config.update", "corosync.link.add", "corosync.link.remove", "corosync.link.remove.list", "corosync.link.update", "corosync.qdevice", "corosync.qdevice.model.net", "corosync.quorum", "corosync.quorum.device", "corosync.quorum.device.heuristics", "corosync.quorum.device.model.net", "corosync.quorum.device.model.net.options_tls_and_kaptb", "corosync.quorum.set-expected-votes-runtime", "corosync.quorum.status", "corosync.quorum.unblock", "corosync.totem.block_unlisted_ips", "corosync.uidgid", "node.add", "node.add.enable", "node.add.separated-name-and-address", "node.add.start", "node.add.start.wait", "node.attributes", "node.attributes.set-list-for-node", "node.confirm-off", "node.fence", "node.guest", "node.kill", "node.maintenance", "node.maintenance.all", "node.maintenance.list", "node.maintenance.wait", "node.remote", "node.remote.onfail-demote", "node.remove", "node.remove-from-caches", "node.remove.list", "node.standby", "node.standby.all", "node.standby.list", "node.standby.wait", "node.start-stop-enable-disable", "node.start-stop-enable-disable.all", "node.start-stop-enable-disable.list", "node.start-stop-enable-disable.start-wait", "node.utilization", "node.utilization.set-list-for-node", "pcmk.acl.enable-disable", "pcmk.acl.group", "pcmk.acl.role", "pcmk.acl.role.create-with-permissions", "pcmk.acl.role.delete-with-users-groups", "pcmk.acl.user", "pcmk.alert", "pcmk.cib.checkpoints", "pcmk.cib.checkpoints.diff", "pcmk.cib.edit", "pcmk.cib.get", "pcmk.cib.get.scope", "pcmk.cib.roles.promoted-unpromoted", "pcmk.cib.set", "pcmk.constraint.colocation.set", "pcmk.constraint.colocation.set.options", "pcmk.constraint.colocation.simple", "pcmk.constraint.colocation.simple.options", "pcmk.constraint.hide-expired", "pcmk.constraint.location.simple", "pcmk.constraint.location.simple.options", "pcmk.constraint.location.simple.resource-regexp", "pcmk.constraint.location.simple.rule", "pcmk.constraint.location.simple.rule.node-attr-type-number", "pcmk.constraint.location.simple.rule.options", "pcmk.constraint.location.simple.rule.rule-add-remove", "pcmk.constraint.no-autocorrect", "pcmk.constraint.order.set", "pcmk.constraint.order.set.options", "pcmk.constraint.order.simple", "pcmk.constraint.order.simple.options", "pcmk.constraint.ticket.set", "pcmk.constraint.ticket.set.options", "pcmk.constraint.ticket.simple", "pcmk.constraint.ticket.simple.constraint-id", "pcmk.properties.cluster", "pcmk.properties.cluster.config.output-formats", "pcmk.properties.cluster.defaults", "pcmk.properties.cluster.describe", "pcmk.properties.cluster.describe.output-formats", "pcmk.properties.operation-defaults", "pcmk.properties.operation-defaults.multiple", "pcmk.properties.operation-defaults.rule", "pcmk.properties.operation-defaults.rule-rsc-op", "pcmk.properties.operation-defaults.rule.hide-expired", "pcmk.properties.operation-defaults.rule.node-attr-type-number", "pcmk.properties.resource-defaults", "pcmk.properties.resource-defaults.multiple", "pcmk.properties.resource-defaults.rule", "pcmk.properties.resource-defaults.rule-rsc-op", "pcmk.properties.resource-defaults.rule.hide-expired", "pcmk.properties.resource-defaults.rule.node-attr-type-number", "pcmk.resource.ban-move-clear", "pcmk.resource.ban-move-clear.bundles", "pcmk.resource.ban-move-clear.clear-expired", "pcmk.resource.ban-move-clear.clone", "pcmk.resource.bundle", "pcmk.resource.bundle.container-docker", "pcmk.resource.bundle.container-docker.promoted-max", "pcmk.resource.bundle.container-podman", "pcmk.resource.bundle.container-podman.promoted-max", "pcmk.resource.bundle.container-rkt", "pcmk.resource.bundle.container-rkt.promoted-max", "pcmk.resource.bundle.reset", "pcmk.resource.bundle.wait", "pcmk.resource.cleanup", "pcmk.resource.cleanup.one-resource", "pcmk.resource.cleanup.strict", "pcmk.resource.clone", "pcmk.resource.clone.custom-id", "pcmk.resource.clone.meta-in-create", "pcmk.resource.clone.wait", "pcmk.resource.config.output-formats", "pcmk.resource.create", "pcmk.resource.create.clone.custom-id", "pcmk.resource.create.in-existing-bundle", "pcmk.resource.create.meta", "pcmk.resource.create.no-master", "pcmk.resource.create.operations", "pcmk.resource.create.operations.onfail-demote", "pcmk.resource.create.promotable", "pcmk.resource.create.promotable.custom-id", "pcmk.resource.create.wait", "pcmk.resource.debug", "pcmk.resource.delete", "pcmk.resource.disable.safe", "pcmk.resource.disable.safe.brief", "pcmk.resource.disable.safe.tag", "pcmk.resource.disable.simulate", "pcmk.resource.disable.simulate.brief", "pcmk.resource.disable.simulate.tag", "pcmk.resource.enable-disable", "pcmk.resource.enable-disable.list", "pcmk.resource.enable-disable.tag", "pcmk.resource.enable-disable.wait", "pcmk.resource.failcount", "pcmk.resource.group", "pcmk.resource.group.add-remove-list", "pcmk.resource.group.wait", "pcmk.resource.manage-unmanage", "pcmk.resource.manage-unmanage.list", "pcmk.resource.manage-unmanage.tag", "pcmk.resource.manage-unmanage.with-monitor", "pcmk.resource.move.autoclean", "pcmk.resource.promotable", "pcmk.resource.promotable.custom-id", "pcmk.resource.promotable.meta-in-create", "pcmk.resource.promotable.wait", "pcmk.resource.refresh", "pcmk.resource.refresh.one-resource", "pcmk.resource.refresh.strict", "pcmk.resource.relations", "pcmk.resource.relocate", "pcmk.resource.restart", "pcmk.resource.update", "pcmk.resource.update-meta", "pcmk.resource.update-meta.list", "pcmk.resource.update-meta.wait", "pcmk.resource.update-operations", "pcmk.resource.update-operations.onfail-demote", "pcmk.resource.update.meta", "pcmk.resource.update.operations", "pcmk.resource.update.operations.onfail-demote", "pcmk.resource.update.wait", "pcmk.resource.utilization", "pcmk.resource.utilization-set-list-for-resource", "pcmk.stonith.cleanup", "pcmk.stonith.cleanup.one-resource", "pcmk.stonith.cleanup.strict", "pcmk.stonith.create", "pcmk.stonith.create.in-group", "pcmk.stonith.create.meta", "pcmk.stonith.create.operations", "pcmk.stonith.create.operations.onfail-demote", "pcmk.stonith.create.wait", "pcmk.stonith.delete", "pcmk.stonith.enable-disable", "pcmk.stonith.enable-disable.list", "pcmk.stonith.enable-disable.wait", "pcmk.stonith.history.cleanup", "pcmk.stonith.history.show", "pcmk.stonith.history.update", "pcmk.stonith.levels", "pcmk.stonith.levels.add-remove-devices-list", "pcmk.stonith.levels.clear", "pcmk.stonith.levels.node-attr", "pcmk.stonith.levels.node-regexp", "pcmk.stonith.levels.verify", "pcmk.stonith.refresh", "pcmk.stonith.refresh.one-resource", "pcmk.stonith.refresh.strict", "pcmk.stonith.update", "pcmk.stonith.update.scsi-devices", "pcmk.stonith.update.scsi-devices.add-remove", "pcmk.stonith.update.scsi-devices.mpath", "pcmk.tag", "pcmk.tag.resources", "pcs.auth.client", "pcs.auth.client.cluster", "pcs.auth.client.token", "pcs.auth.deauth-client", "pcs.auth.deauth-server", "pcs.auth.no-bidirectional", "pcs.auth.separated-name-and-address", "pcs.auth.server.token", "pcs.cfg-in-file.cib", "pcs.daemon-ssl-cert.set", "pcs.daemon-ssl-cert.sync-to-local-cluster", "pcs.disaster-recovery.essentials", "pcs.request-timeout", "resource-agents.describe", "resource-agents.list", "resource-agents.list.detailed", "resource-agents.ocf.version-1-0", "resource-agents.ocf.version-1-1", "resource-agents.self-validation", "sbd", "sbd.option-timeout-action", "sbd.shared-block-device", "status.corosync.membership", "status.pcmk.resources.hide-inactive", "status.pcmk.resources.id", "status.pcmk.resources.node", "status.pcmk.resources.orphaned", "status.pcmk.xml", "stonith-agents.describe", "stonith-agents.list", "stonith-agents.list.detailed", "stonith-agents.ocf.version-1-0", "stonith-agents.ocf.version-1-1", "stonith-agents.self-validation" ], "__ha_cluster_pcsd_capabilities_available": false }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Fetch pcsd capabilities] **********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:124
Thursday 08 February 2024 19:42:24 +0000 (0:00:00.020) 0:00:31.091 *****
skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_pcsd_capabilities_available", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.ha_cluster : Parse pcsd capabilities] **********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:132
Thursday 08 February 2024 19:42:24 +0000 (0:00:00.009) 0:00:31.101 *****
ok: [sut] => { "ansible_facts": { "__ha_cluster_pcsd_capabilities": [] }, "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Fail if pcs is to old to configure resources and operations defaults] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:141
Thursday 08 February 2024 19:42:24 +0000 (0:00:00.019) 0:00:31.120 *****
skipping: [sut] => { "changed": false, "false_condition": "( ha_cluster_resource_defaults and not 'pcmk.properties.resource-defaults.multiple' in __ha_cluster_pcs_capabilities ) or ( ha_cluster_resource_operation_defaults and not 'pcmk.properties.operation-defaults.multiple' in __ha_cluster_pcs_capabilities )", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Set hacluster password] ***********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:22
Thursday 08 February 2024 19:42:24 +0000 (0:00:00.010) 0:00:31.131 *****
changed: [sut] => { "append": false, "changed": true, "comment": "cluster user", "group": 189, "home": "/home/hacluster", "move_home": false, "name": "hacluster", "password": "NOT_LOGGING_PASSWORD", "shell": "/sbin/nologin", "state": "present", "uid": 189 }
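The hacluster account above is managed with Ansible's user module, which masks the password in its result ("NOT_LOGGING_PASSWORD"). A minimal sketch of such a task, assuming the role feeds the vault-sourced plaintext through the password_hash filter (the exact filter arguments are a guess):

    # Hypothetical sketch of the "Set hacluster password" task in main.yml:22
    - name: Set hacluster password
      user:
        name: hacluster
        password: "{{ ha_cluster_hacluster_password | string | password_hash('sha512') }}"

The module reports "changed" here because the stored hash for uid 189 differs from the newly generated one.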
TASK [fedora.linux_system_roles.ha_cluster : Configure shell] ******************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:29
Thursday 08 February 2024 19:42:24 +0000 (0:00:00.402) 0:00:31.533 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Stop pcsd] ************************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:6
Thursday 08 February 2024 19:42:24 +0000 (0:00:00.026) 0:00:31.559 *****
ok: [sut] => { "changed": false, "name": "pcsd", "state": "stopped", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target system.slice network-online.target pcsd-ruby.service systemd-journald.socket sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "man:pcsd(8) man:pcs(8)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14004", "LimitNPROCSoft": "14004", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14004", "LimitSIGPENDINGSoft": "14004", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice network-online.target pcsd-ruby.service sysinit.target", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22406", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } }

TASK [fedora.linux_system_roles.ha_cluster : Regenerate pcsd TLS certificate and key] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:11
Thursday 08 February 2024 19:42:25 +0000 (0:00:00.476) 0:00:32.036 *****
skipping: [sut] => (item=/var/lib/pcsd/pcsd.key) => { "ansible_loop_var": "item", "changed": false, "false_condition": "ha_cluster_regenerate_keys", "item": "/var/lib/pcsd/pcsd.key", "skip_reason": "Conditional result was False" }
skipping: [sut] => (item=/var/lib/pcsd/pcsd.crt) => { "ansible_loop_var": "item", "changed": false, "false_condition": "ha_cluster_regenerate_keys", "item": "/var/lib/pcsd/pcsd.crt", "skip_reason": "Conditional result was False" }
skipping: [sut] => { "changed": false }

MSG:

All items skipped

TASK [fedora.linux_system_roles.ha_cluster : Get the stat of /var/lib/pcsd] ****
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:25
Thursday 08 February 2024 19:42:25 +0000 (0:00:00.011) 0:00:32.047 *****
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Allow certmonger to write into pcsd's certificate directory] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:30
Thursday 08 February 2024 19:42:25 +0000 (0:00:00.017) 0:00:32.065 *****
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" }

TASK [Ensure the name of ha_cluster_pcsd_certificates is /var/lib/pcsd/pcsd; Create certificates using the certificate role] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:37
Thursday 08 February 2024 19:42:25 +0000 (0:00:00.018) 0:00:32.084 *****
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Set pcsd's certificate directory back to cluster_var_lib_t] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:49
Thursday 08 February 2024 19:42:25 +0000 (0:00:00.018) 0:00:32.102 *****
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_certificates | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS private key] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:64
Thursday 08 February 2024 19:42:25 +0000 (0:00:00.018) 0:00:32.121 *****
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is string", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Distribute pcsd TLS certificate] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:71
Thursday 08 February 2024 19:42:25 +0000 (0:00:00.020) 0:00:32.141 *****
skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_pcsd_public_key_src is string", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Distribute pcs_settings.conf] *****
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:79
Thursday 08 February 2024 19:42:25 +0000 (0:00:00.021) 0:00:32.163 *****
changed: [sut] => { "changed": true, "checksum": "9bbea634a798cf0976b80cd3c4e34aca6a6a7d44", "dest": "/var/lib/pcsd/pcs_settings.conf", "gid": 0, "group": "root", "md5sum": "4b74001d21d3867563d0c773bde32b42", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:cluster_var_lib_t:s0", "size": 361, "src": "/root/.ansible/tmp/ansible-tmp-1707421345.1814976-25129-154830595513125/source", "state": "file", "uid": 0 }

TASK [fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88
Thursday 08 February 2024 19:42:25 +0000 (0:00:00.611) 0:00:32.774 *****
changed: [sut] => { "changed": true, "enabled": true, "name": "pcsd", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice pcsd-ruby.service sysinit.target network-online.target basic.target systemd-journald.socket", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pcsd-ruby.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "PCS GUI and remote configuration interface", "DevicePolicy": "auto", "Documentation": "man:pcsd(8) man:pcs(8)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "EnvironmentFiles": "/etc/sysconfig/pcsd (ignore_errors=no)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/pcsd ; argv[]=/usr/sbin/pcsd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pcsd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pcsd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024",
"LimitNPROC": "14004", "LimitNPROCSoft": "14004", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14004", "LimitSIGPENDINGSoft": "14004", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pcsd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "main", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice pcsd-ruby.service sysinit.target network-online.target", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22406", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.ha_cluster : Configure firewall] *************** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:35 Thursday 08 February 2024 19:42:27 +0000 (0:00:01.524) 0:00:34.298 ***** included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/firewall.yml for sut TASK [Ensure the service and the ports status with the firewall role] ********** task path: 
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/firewall.yml:3 Thursday 08 February 2024 19:42:27 +0000 (0:00:00.022) 0:00:34.321 ***** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Configure selinux] **************** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:38 Thursday 08 February 2024 19:42:27 +0000 (0:00:00.022) 0:00:34.344 ***** included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Populate service facts] *********** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:3 Thursday 08 February 2024 19:42:27 +0000 (0:00:00.024) 0:00:34.369 ***** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "auto-cpufreq.service": { "name": "auto-cpufreq.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "corosync-notifyd.service": { "name": "corosync-notifyd.service", 
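
The firewall step at the top of this stretch was skipped because ha_cluster_manage_firewall evaluated false, and the selinux tasks that follow are gated the same way on firewalld's state. A minimal sketch of that gating pattern, assuming an example firewall entry that is not taken from this test run:

```yaml
---
# Illustrative sketch of the conditional role invocation seen in the log.
# The high-availability service entry is an assumed example.
- hosts: all
  become: true
  vars:
    ha_cluster_manage_firewall: false
  tasks:
    - name: Ensure the service and the ports status with the firewall role
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          - service: high-availability
            state: enabled
      when: ha_cluster_manage_firewall | bool
```

With the variable left at false, Ansible reports exactly the kind of skip shown here: "false_condition": "ha_cluster_manage_firewall | bool".
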
"source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qnetd.service": { "name": "corosync-qnetd.service", "source": "systemd", "state": "running", "status": "disabled" }, "corosync.service": { "name": "corosync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crm_mon.service": { "name": "crm_mon.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": 
"dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pacemaker.service": { "name": "pacemaker.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pcsd-ruby.service": { "name": "pcsd-ruby.service", "source": "systemd", "state": "running", "status": "disabled" }, "pcsd.service": { "name": "pcsd.service", "source": "systemd", "state": "running", "status": "enabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": 
"plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "power-profiles-daemon.service": { "name": "power-profiles-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": 
"systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { 
"name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tlp.service": { "name": "tlp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Set the fence-virt/fence-agents port to _ha_cluster_selinux] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:12 Thursday 08 February 2024 19:42:28 +0000 (0:00:01.622) 0:00:35.991 ***** skipping: [sut] => { "changed": false, "false_condition": "ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Get associated selinux ports] ***** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:20 Thursday 08 February 2024 19:42:28 +0000 (0:00:00.023) 0:00:36.015 ***** skipping: [sut] => { "changed": false, "false_condition": 
"ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Add the high-availability service ports to _ha_cluster_selinux] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:28 Thursday 08 February 2024 19:42:29 +0000 (0:00:00.025) 0:00:36.040 ***** skipping: [sut] => { "changed": false, "false_condition": "ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [Ensure the service and the ports status with the selinux role] *********** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:35 Thursday 08 February 2024 19:42:29 +0000 (0:00:00.024) 0:00:36.064 ***** skipping: [sut] => { "changed": false, "false_condition": "ansible_facts.services[\"firewalld.service\"][\"state\"] == \"running\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Install cluster packages] ********* task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:44 Thursday 08 February 2024 19:42:29 +0000 (0:00:00.022) 0:00:36.087 ***** changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: libwsman1-2.6.5-10.el8.x86_64", "Installed: fence-agents-all-4.2.1-129.el8.x86_64", "Installed: fence-agents-ipmilan-4.2.1-129.el8.noarch", "Installed: gnutls-utils-3.6.16-7.el8.x86_64", "Installed: fence-agents-amt-ws-4.2.1-129.el8.noarch", "Installed: fence-agents-kdump-4.2.1-129.el8.x86_64", "Installed: fence-agents-apc-4.2.1-129.el8.noarch", "Installed: fence-agents-apc-snmp-4.2.1-129.el8.noarch", "Installed: fence-agents-bladecenter-4.2.1-129.el8.noarch", "Installed: fence-agents-mpath-4.2.1-129.el8.noarch", "Installed: fence-agents-brocade-4.2.1-129.el8.noarch", "Installed: fence-agents-redfish-4.2.1-129.el8.x86_64", "Installed: fence-agents-cisco-mds-4.2.1-129.el8.noarch", "Installed: fence-agents-rhevm-4.2.1-129.el8.noarch", "Installed: fence-agents-cisco-ucs-4.2.1-129.el8.noarch", "Installed: fence-agents-rsa-4.2.1-129.el8.noarch", "Installed: fence-agents-common-4.2.1-129.el8.noarch", "Installed: fence-agents-rsb-4.2.1-129.el8.noarch", "Installed: fence-agents-compute-4.2.1-129.el8.noarch", "Installed: fence-agents-sbd-4.2.1-129.el8.noarch", "Installed: fence-agents-drac5-4.2.1-129.el8.noarch", "Installed: fence-agents-scsi-4.2.1-129.el8.noarch", "Installed: fence-agents-eaton-snmp-4.2.1-129.el8.noarch", "Installed: sbd-1.5.2-2.el8.x86_64", "Installed: fence-agents-emerson-4.2.1-129.el8.noarch", "Installed: fence-agents-vmware-rest-4.2.1-129.el8.noarch", "Installed: fence-agents-eps-4.2.1-129.el8.noarch", "Installed: telnet-1:0.17-76.el8.x86_64", "Installed: fence-agents-vmware-soap-4.2.1-129.el8.noarch", "Installed: fence-agents-heuristics-ping-4.2.1-129.el8.noarch", "Installed: python3-pexpect-4.3.1-3.el8.noarch", "Installed: fence-agents-wti-4.2.1-129.el8.noarch", "Installed: ipmitool-1.8.18-19.el8.x86_64", "Installed: fence-agents-hpblade-4.2.1-129.el8.noarch", "Installed: userspace-rcu-0.10.1-4.el8.x86_64", "Installed: fence-virt-1.0.0-6.el8.x86_64", "Installed: net-snmp-utils-1:5.8-28.el8.x86_64", "Installed: fence-agents-ibmblade-4.2.1-129.el8.noarch", "Installed: openwsman-python3-2.6.5-10.el8.x86_64", "Installed: 
fence-agents-ifmib-4.2.1-129.el8.noarch", "Installed: fence-agents-ilo-moonshot-4.2.1-129.el8.noarch", "Installed: device-mapper-multipath-0.8.4-41.el8.x86_64", "Installed: fence-agents-ilo-mp-4.2.1-129.el8.noarch", "Installed: fence-agents-ilo-ssh-4.2.1-129.el8.noarch", "Installed: autogen-libopts-5.18.12-8.el8.x86_64", "Installed: python3-ptyprocess-0.5.2-4.el8.noarch", "Installed: fence-agents-ilo2-4.2.1-129.el8.noarch", "Installed: gnutls-dane-3.6.16-7.el8.x86_64", "Installed: python3-suds-0.7-0.11.94664ddd46a6.el8.noarch", "Installed: device-mapper-multipath-libs-0.8.4-41.el8.x86_64", "Installed: fence-agents-intelmodular-4.2.1-129.el8.noarch", "Installed: fence-agents-ipdu-4.2.1-129.el8.noarch" ] } TASK [fedora.linux_system_roles.ha_cluster : Distribute fence-virt authkey] **** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:50 Thursday 08 February 2024 19:42:36 +0000 (0:00:07.476) 0:00:43.564 ***** included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Create /etc/cluster directory] **** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:3 Thursday 08 February 2024 19:42:36 +0000 (0:00:00.026) 0:00:43.590 ***** changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/cluster", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Get fence_xvm.key] **************** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:9 Thursday 08 February 2024 19:42:36 +0000 (0:00:00.222) 0:00:43.813 ***** included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on the controller fence_xvm.key] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:16 Thursday 08 February 2024 19:42:36 +0000 (0:00:00.030) 0:00:43.844 ***** skipping: [sut] => { "changed": false, "false_condition": "preshared_key_src is string and preshared_key_src | length > 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from the controller fence_xvm.key] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:21 Thursday 08 February 2024 19:42:36 +0000 (0:00:00.023) 0:00:43.867 ***** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from the controller fence_xvm.key] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:29 Thursday 08 February 2024 19:42:36 +0000 (0:00:00.023) 0:00:43.890 ***** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK 
[fedora.linux_system_roles.ha_cluster : Generate key using OpenSSL fence_xvm.key] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:44 Thursday 08 February 2024 19:42:36 +0000 (0:00:00.025) 0:00:43.916 ***** ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Fetch generated fence_xvm.key] **** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:50 Thursday 08 February 2024 19:42:37 +0000 (0:00:00.209) 0:00:44.126 ***** ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on cluster nodes fence_xvm.key] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:60 Thursday 08 February 2024 19:42:37 +0000 (0:00:00.028) 0:00:44.154 ***** ok: [sut] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from cluster nodes fence_xvm.key] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:65 Thursday 08 February 2024 19:42:37 +0000 (0:00:00.198) 0:00:44.353 ***** skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from cluster nodes fence_xvm.key] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:73 Thursday 08 February 2024 19:42:37 +0000 (0:00:00.023) 0:00:44.377 ***** skipping: [sut] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Send fence_xvm.key to nodes] ****** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:17 Thursday 08 February 2024 19:42:37 +0000 (0:00:00.031) 0:00:44.408 ***** changed: [sut] => { "changed": true, "checksum": "07fa2e28e7ed543ce75f6bb8362a2cb484e2ddf0", "dest": "/etc/cluster/fence_xvm.key", "gid": 0, "group": "root", "md5sum": "22792e1832aaa3e4bb11445d83eb2913", "mode": "0600", "owner": "root", "secontext": "system_u:object_r:cluster_conf_t:s0", "size": 512, "src": "/root/.ansible/tmp/ansible-tmp-1707421357.4353845-25165-22757292502985/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.ha_cluster : Configure SBD] ******************** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:55 Thursday 08 February 2024 19:42:37 +0000 (0:00:00.538) 0:00:44.947 ***** included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml for sut TASK [fedora.linux_system_roles.ha_cluster : Configure watchdog kernel 
module blocklist] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:10 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.076) 0:00:45.023 ***** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Unload watchdog kernel modules from blocklist] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:22 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.019) 0:00:45.042 ***** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Configure watchdog kernel modules] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:30 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.019) 0:00:45.062 ***** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Load watchdog kernel modules] ***** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:40 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.018) 0:00:45.081 ***** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Probe SBD devices] **************** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:57 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.018) 0:00:45.099 ***** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Initialize SBD devices] *********** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:70 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.018) 0:00:45.118 ***** skipping: [sut] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.ha_cluster : Distribute SBD config] ************ task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:88 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.010) 0:00:45.128 ***** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_sbd_enabled", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Ensure /etc/systemd/system/sbd.service.d directory exists] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:109 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.018) 0:00:45.147 ***** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_sbd_enabled", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Override start timeout for SBD] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:117 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.018) 0:00:45.165 ***** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_sbd_enabled", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.ha_cluster : Reload systemd service files] ***** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:131 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.018) 0:00:45.184 ***** skipping: [sut] => { "changed": false, "false_condition": "ha_cluster_sbd_enabled", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.ha_cluster : Get services status - detect pacemaker] *** task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:135 Thursday 08 February 2024 19:42:38 +0000 (0:00:00.018) 0:00:45.203 ***** ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "auto-cpufreq.service": { "name": "auto-cpufreq.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "corosync-notifyd.service": { "name": "corosync-notifyd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "corosync-qnetd.service": { "name": "corosync-qnetd.service", "source": "systemd", "state": "running", "status": "disabled" }, "corosync.service": { "name": "corosync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
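
The "Get services status - detect pacemaker" task that produces the service inventory in this stretch relies on the service_facts module, which populates ansible_facts.services with the per-unit state/status pairs dumped here. A minimal sketch of that detection pattern; the fact name __pacemaker_running is hypothetical:

```yaml
---
# Sketch of detecting a service via service_facts; not the role's source.
- hosts: all
  tasks:
    - name: Populate service facts
      ansible.builtin.service_facts:

    - name: Record whether pacemaker is currently running (hypothetical fact)
      ansible.builtin.set_fact:
        __pacemaker_running: "{{ (ansible_facts.services['pacemaker.service']['state'] | default('')) == 'running' }}"
```

In this run the dump shows pacemaker.service as "inactive"/"disabled", so such a fact would evaluate false.
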
"cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crm_mon.service": { "name": "crm_mon.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-activation.service": { "name": "lvm2-activation.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": 
{ "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pacemaker.service": { "name": "pacemaker.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pcsd-ruby.service": { "name": "pcsd-ruby.service", "source": "systemd", "state": "running", "status": "disabled" }, "pcsd.service": { "name": "pcsd.service", "source": "systemd", "state": "running", "status": "enabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": 
"plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "power-profiles-daemon.service": { "name": "power-profiles-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "sbd.service": { "name": "sbd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "sbd_remote.service": { "name": "sbd_remote.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": 
"sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": 
"systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": 
"inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tlp.service": { "name": "tlp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.ha_cluster : Set stonith-watchdog-timeout cluster 
property in CIB] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:142
Thursday 08 February 2024 19:42:39 +0000 (0:00:01.554) 0:00:46.757 *****
changed: [sut] => { "changed": true, "cmd": [ "pcs", "--force", "-f", "/var/lib/pacemaker/cib/cib.xml", "--", "property", "set", "stonith-watchdog-timeout=0" ], "delta": "0:00:00.643381", "end": "2024-02-08 19:42:40.561198", "rc": 0, "start": "2024-02-08 19:42:39.917817" }

TASK [fedora.linux_system_roles.ha_cluster : Correct cib.xml ownership] ********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:155
Thursday 08 February 2024 19:42:40 +0000 (0:00:00.840) 0:00:47.598 *****
changed: [sut] => { "changed": true, "gid": 189, "group": "haclient", "mode": "0600", "owner": "hacluster", "path": "/var/lib/pacemaker/cib/cib.xml", "secontext": "unconfined_u:object_r:cluster_var_lib_t:s0", "size": 412, "state": "file", "uid": 189 }

TASK [fedora.linux_system_roles.ha_cluster : Clean cib.xml.sig] ****************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:167
Thursday 08 February 2024 19:42:40 +0000 (0:00:00.206) 0:00:47.804 *****
ok: [sut] => { "changed": false, "path": "/var/lib/pacemaker/cib/cib.xml.sig", "state": "absent" }
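Note: the three tasks above show a pattern worth calling out. pcs can edit the CIB offline with -f against /var/lib/pacemaker/cib/cib.xml, but running as root leaves the file with the wrong owner, so the role restores hacluster:haclient 0600 and removes the now-stale cib.xml.sig digest. A minimal manual equivalent, a sketch based only on the commands logged above and not the role's literal tasks:

    # Set a cluster property directly in the on-disk CIB (cluster not running)
    pcs --force -f /var/lib/pacemaker/cib/cib.xml -- property set stonith-watchdog-timeout=0
    # pcs -f ran as root, so hand the file back to pacemaker's user
    chown hacluster:haclient /var/lib/pacemaker/cib/cib.xml
    chmod 0600 /var/lib/pacemaker/cib/cib.xml
    # remove the now-stale digest so it cannot conflict with the edited CIB
    rm -f /var/lib/pacemaker/cib/cib.xml.sig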
TASK [fedora.linux_system_roles.ha_cluster : Configure corosync] ***************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:58
Thursday 08 February 2024 19:42:40 +0000 (0:00:00.201) 0:00:48.006 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Create a corosync.conf tempfile] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:3
Thursday 08 February 2024 19:42:41 +0000 (0:00:00.035) 0:00:48.041 *****
changed: [sut] => { "changed": true, "gid": 0, "group": "root", "mode": "0600", "owner": "root", "path": "/tmp/ansible.v0bvpvat_ha_cluster_corosync_conf", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 0, "state": "file", "uid": 0 }

TASK [fedora.linux_system_roles.ha_cluster : Create a corosync.conf file content] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:16
Thursday 08 February 2024 19:42:41 +0000 (0:00:00.246) 0:00:48.288 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Create a corosync.conf file content using pcs-0.10] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml:3
Thursday 08 February 2024 19:42:41 +0000 (0:00:00.025) 0:00:48.314 *****
changed: [sut] => { "changed": true, "cmd": [ "pcs", "cluster", "setup", "--corosync_conf", "/tmp/ansible.v0bvpvat_ha_cluster_corosync_conf", "--overwrite", "--no-cluster-uuid", "--", "test-cluster", "localhost" ], "delta": "0:00:01.542774", "end": "2024-02-08 19:42:43.029790", "rc": 0, "start": "2024-02-08 19:42:41.487016" }
STDOUT: Warning: Unable to read the known-hosts file: No such file or directory: '/var/lib/pcsd/known-hosts'
No addresses specified for host 'localhost', using 'localhost'

TASK [fedora.linux_system_roles.ha_cluster : Add qdevice configuration to corosync.conf] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml:80
Thursday 08 February 2024 19:42:43 +0000 (0:00:01.753) 0:00:50.068 *****
changed: [sut] => { "changed": true, "cmd": [ "pcs", "quorum", "device", "add", "--corosync_conf", "/tmp/ansible.v0bvpvat_ha_cluster_corosync_conf", "--", "model", "net", "host=localhost", "algorithm=lms" ], "delta": "0:00:00.552880", "end": "2024-02-08 19:42:43.783857", "rc": 0, "start": "2024-02-08 19:42:43.230977" }

TASK [fedora.linux_system_roles.ha_cluster : Fetch created corosync.conf file] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:19
Thursday 08 February 2024 19:42:43 +0000 (0:00:00.753) 0:00:50.821 *****
ok: [sut] => { "changed": false, "content": "dG90ZW0gewogICAgdmVyc2lvbjogMgogICAgY2x1c3Rlcl9uYW1lOiB0ZXN0LWNsdXN0ZXIKICAgIHRyYW5zcG9ydDoga25ldAogICAgY3J5cHRvX2NpcGhlcjogYWVzMjU2CiAgICBjcnlwdG9faGFzaDogc2hhMjU2Cn0KCm5vZGVsaXN0IHsKICAgIG5vZGUgewogICAgICAgIHJpbmcwX2FkZHI6IGxvY2FsaG9zdAogICAgICAgIG5hbWU6IGxvY2FsaG9zdAogICAgICAgIG5vZGVpZDogMQogICAgfQp9CgpxdW9ydW0gewogICAgcHJvdmlkZXI6IGNvcm9zeW5jX3ZvdGVxdW9ydW0KCiAgICBkZXZpY2UgewogICAgICAgIG1vZGVsOiBuZXQKCiAgICAgICAgbmV0IHsKICAgICAgICAgICAgYWxnb3JpdGhtOiBsbXMKICAgICAgICAgICAgaG9zdDogbG9jYWxob3N0CiAgICAgICAgfQogICAgfQp9Cgpsb2dnaW5nIHsKICAgIHRvX2xvZ2ZpbGU6IHllcwogICAgbG9nZmlsZTogL3Zhci9sb2cvY2x1c3Rlci9jb3Jvc3luYy5sb2cKICAgIHRvX3N5c2xvZzogeWVzCiAgICB0aW1lc3RhbXA6IG9uCn0K", "encoding": "base64", "source": "/tmp/ansible.v0bvpvat_ha_cluster_corosync_conf" }
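Note: the "content" field above is the generated corosync.conf, base64-encoded by the fetch. Decoded (base64 -d), it reads as follows; the device block inside quorum is what "pcs quorum device add" appended, pointing the single-node cluster at the qnetd host with the lms algorithm:

    totem {
        version: 2
        cluster_name: test-cluster
        transport: knet
        crypto_cipher: aes256
        crypto_hash: sha256
    }

    nodelist {
        node {
            ring0_addr: localhost
            name: localhost
            nodeid: 1
        }
    }

    quorum {
        provider: corosync_votequorum

        device {
            model: net

            net {
                algorithm: lms
                host: localhost
            }
        }
    }

    logging {
        to_logfile: yes
        logfile: /var/log/cluster/corosync.log
        to_syslog: yes
        timestamp: on
    }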
TASK [fedora.linux_system_roles.ha_cluster : Distribute corosync.conf file] ****
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:26
Thursday 08 February 2024 19:42:44 +0000 (0:00:00.263) 0:00:51.084 *****
changed: [sut] => { "changed": true, "checksum": "a1837653ba14103400399abc9740152e1da908c6", "dest": "/etc/corosync/corosync.conf", "gid": 0, "group": "root", "md5sum": "3478d15f60e30891b975dc627c409dec", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 507, "src": "/root/.ansible/tmp/ansible-tmp-1707421364.1143894-25203-90152580575628/source", "state": "file", "uid": 0 }

TASK [fedora.linux_system_roles.ha_cluster : Remove a corosync.conf tempfile] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:36
Thursday 08 February 2024 19:42:44 +0000 (0:00:00.525) 0:00:51.610 *****
changed: [sut] => { "changed": true, "path": "/tmp/ansible.v0bvpvat_ha_cluster_corosync_conf", "state": "absent" }

TASK [fedora.linux_system_roles.ha_cluster : Cluster auth] *********************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:61
Thursday 08 February 2024 19:42:44 +0000 (0:00:00.203) 0:00:51.814 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Check pcs auth status] ************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml:3
Thursday 08 February 2024 19:42:44 +0000 (0:00:00.037) 0:00:51.851 *****
changed: [sut] => { "changed": true, "cmd": [ "pcs", "status", "pcsd", "--", "localhost", "localhost" ], "delta": "0:00:00.604088", "end": "2024-02-08 19:42:45.624646", "failed_when_result": false, "rc": 2, "start": "2024-02-08 19:42:45.020558" }
STDOUT: Warning: Unable to read the known-hosts file: No such file or directory: '/var/lib/pcsd/known-hosts'
localhost: Unable to authenticate
localhost: Unable to authenticate
MSG: non-zero return code

TASK [fedora.linux_system_roles.ha_cluster : Run pcs auth] *********************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml:23
Thursday 08 February 2024 19:42:45 +0000 (0:00:00.811) 0:00:52.662 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Pcs auth using pcs-0.10] **********
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml:3
Thursday 08 February 2024 19:42:45 +0000 (0:00:00.027) 0:00:52.690 *****
changed: [sut] => { "changed": true, "cmd": [ "pcs", "host", "auth", "-u", "hacluster", "--", "localhost" ], "delta": "0:00:00.955832", "end": "2024-02-08 19:42:46.837996", "rc": 0, "start": "2024-02-08 19:42:45.882164" }
STDOUT: Password: localhost: Authorized
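Note: the auth flow above is check-then-fix. "pcs status pcsd" exits with rc=2 while the node is unauthenticated, which the role tolerates (failed_when_result is false), and "pcs host auth" then writes a token to /var/lib/pcsd/known-hosts, the very file the earlier warnings said was missing. A manual equivalent, as a sketch; HACLUSTER_PASSWORD stands in for the vaulted ha_cluster_hacluster_password:

    # rc=2 from "pcs status pcsd" just means the host is not authenticated yet
    pcs status pcsd -- localhost
    # authenticate as hacluster; pcs prompts "Password:" on stdin, so the
    # password can be piped in non-interactively
    printf '%s\n' "$HACLUSTER_PASSWORD" | pcs host auth -u hacluster -- localhost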
TASK [fedora.linux_system_roles.ha_cluster : Pcs auth for qdevice using pcs-0.10] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml:19
Thursday 08 February 2024 19:42:46 +0000 (0:00:01.184) 0:00:53.875 *****
skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_qdevice_host not in __ha_cluster_all_node_names", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Distribute cluster shared keys] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:66
Thursday 08 February 2024 19:42:46 +0000 (0:00:00.021) 0:00:53.896 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Get corosync authkey] *************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:3
Thursday 08 February 2024 19:42:46 +0000 (0:00:00.038) 0:00:53.934 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on the controller corosync authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:16
Thursday 08 February 2024 19:42:46 +0000 (0:00:00.034) 0:00:53.969 *****
skipping: [sut] => { "changed": false, "false_condition": "preshared_key_src is string and preshared_key_src | length > 1", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from the controller corosync authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:21
Thursday 08 February 2024 19:42:46 +0000 (0:00:00.025) 0:00:53.994 *****
skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from the controller corosync authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:29
Thursday 08 February 2024 19:42:47 +0000 (0:00:00.024) 0:00:54.018 *****
skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Generate key using OpenSSL corosync authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:44
Thursday 08 February 2024 19:42:47 +0000 (0:00:00.023) 0:00:54.042 *****
ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Fetch generated corosync authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:50
Thursday 08 February 2024 19:42:47 +0000 (0:00:00.201) 0:00:54.244 *****
ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on cluster nodes corosync authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:60
Thursday 08 February 2024 19:42:47 +0000 (0:00:00.026) 0:00:54.271 *****
ok: [sut] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from cluster nodes corosync authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:65
Thursday 08 February 2024 19:42:47 +0000 (0:00:00.195) 0:00:54.466 *****
skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from cluster nodes corosync authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:73
Thursday 08 February 2024 19:42:47 +0000 (0:00:00.023) 0:00:54.490 *****
skipping: [sut] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
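Note: presharedkey.yml is a reusable key-lookup chain: use a controller-supplied key if preshared_key_src is set, otherwise reuse a key already present on a cluster node, otherwise generate a fresh one; the key material itself is hidden by no_log. Judging from the task name "Generate key using OpenSSL" and the 256-byte authkey files distributed below, a manual equivalent would be roughly the following sketch (the role's exact invocation is not visible in this log):

    # generate a 256-byte random pre-shared key for corosync
    openssl rand -out /etc/corosync/authkey 256
    chmod 0400 /etc/corosync/authkey

The same include is re-run below with identical logic for the pacemaker authkey.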
TASK [fedora.linux_system_roles.ha_cluster : Distribute corosync authkey] ******
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:11
Thursday 08 February 2024 19:42:47 +0000 (0:00:00.033) 0:00:54.524 *****
changed: [sut] => { "changed": true, "checksum": "7a5080e6cd7503d803e26e11ded9a84978b62ba3", "dest": "/etc/corosync/authkey", "gid": 0, "group": "root", "md5sum": "d292cf84917ade9bb2d7574b4c5fbc10", "mode": "0400", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 256, "src": "/root/.ansible/tmp/ansible-tmp-1707421367.5524926-25233-209979527735870/source", "state": "file", "uid": 0 }

TASK [fedora.linux_system_roles.ha_cluster : Get pacemaker authkey] ************
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:20
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.521) 0:00:55.045 *****
included: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml for sut

TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on the controller pacemaker authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:16
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.034) 0:00:55.080 *****
skipping: [sut] => { "changed": false, "false_condition": "preshared_key_src is string and preshared_key_src | length > 1", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from the controller pacemaker authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:21
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.024) 0:00:55.104 *****
skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from the controller pacemaker authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:29
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.023) 0:00:55.128 *****
skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Generate key using OpenSSL pacemaker authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:44
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.024) 0:00:55.152 *****
ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Fetch generated pacemaker authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:50
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.203) 0:00:55.356 *****
ok: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Check if key exists on cluster nodes pacemaker authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:60
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.027) 0:00:55.383 *****
ok: [sut] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.ha_cluster : Slurp existing key from cluster nodes pacemaker authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:65
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.194) 0:00:55.577 *****
skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Use the slurped key from cluster nodes pacemaker authkey] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/presharedkey.yml:73
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.026) 0:00:55.604 *****
skipping: [sut] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }
skipping: [sut] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.ha_cluster : Distribute pacemaker authkey] *****
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:28
Thursday 08 February 2024 19:42:48 +0000 (0:00:00.034) 0:00:55.638 *****
changed: [sut] => { "changed": true, "checksum": "e534e53f563c652960657c7b33575c5f07e0c19c", "dest": "/etc/pacemaker/authkey", "gid": 189, "group": "haclient", "md5sum": "c50ffb4df2cc76117c6a70356cfa1e29", "mode": "0400", "owner": "hacluster", "secontext": "system_u:object_r:etc_t:s0", "size": 256, "src": "/root/.ansible/tmp/ansible-tmp-1707421368.6646042-25253-15941150532711/source", "state": "file", "uid": 189 }
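Note: the two keys deliberately land with different ownership: /etc/corosync/authkey as root:root and /etc/pacemaker/authkey as hacluster:haclient, both 256 bytes and mode 0400. A quick verification on a node (a sketch using GNU stat):

    stat -c '%U:%G %a %s %n' /etc/corosync/authkey /etc/pacemaker/authkey
    # expected, per the task results above:
    #   root:root 400 256 /etc/corosync/authkey
    #   hacluster:haclient 400 256 /etc/pacemaker/authkey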
TASK [fedora.linux_system_roles.ha_cluster : Remove qdevice certificates [CLI]] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:37
Thursday 08 February 2024 19:42:49 +0000 (0:00:00.513) 0:00:56.151 *****
skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_qdevice_model != \"net\" or ha_cluster_regenerate_keys", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.ha_cluster : Remove qdevice certificates [API]] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:46
Thursday 08 February 2024 19:42:49 +0000 (0:00:00.019) 0:00:56.171 *****
skipping: [sut] => { "changed": false, "false_condition": "__ha_cluster_qdevice_model != \"net\" or ha_cluster_regenerate_keys", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.ha_cluster : Obtain and distribute qdevice certificates [CLI]] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:54
Thursday 08 February 2024 19:42:49 +0000 (0:00:00.019) 0:00:56.190 *****
An exception occurred during task execution. To see the full traceback, use -vvv. The error was: NoneType: None
fatal: [sut]: FAILED! => { "changed": false, "rc": 1 }
STDOUT:
Checking if qdevice certs are present
certutil: function failed: SEC_ERROR_BAD_DATABASE: security library: bad database.
Getting pcs token and address for 'localhost'
Downloading qnetd CA certificate from 'localhost:2224' and initializing qdevice certificate storage
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100  5791  100  5791    0     0   188k      0 --:--:-- --:--:-- --:--:--  188k
Error: unable to run command /usr/sbin/corosync-qdevice-net-certutil -i -c /etc/corosync/qdevice/net/nssdb/qnetd-cacert.crt: No such file or directory: '/usr/sbin/corosync-qdevice-net-certutil'
STDERR: Shared connection to 10.31.14.54 closed.
MSG: non-zero return code
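Note: this is the actual failure of the run. The pcs helper downloads the qnetd CA certificate from localhost:2224 successfully (the 5791-byte curl transfer) but then cannot execute /usr/sbin/corosync-qdevice-net-certutil. On el8 systems that utility is shipped by the corosync-qdevice package, the qdevice client side, which is distinct from the corosync-qnetd server package installed earlier in this run; the preceding SEC_ERROR_BAD_DATABASE line is only certutil probing an NSS database that does not exist yet. A plausible check on the cluster node (hedged; the package name is inferred from the el8 artifacts in this log, not stated by the role):

    # is the qdevice client tooling present?
    rpm -q corosync-qdevice || dnf install -y corosync-qdevice
    ls -l /usr/sbin/corosync-qdevice-net-certutil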
TASK [Clean up test environment for qnetd] *************************************
task path: /WORKDIR/git-suseibi8cjjt/tests/template_qdevice.yml:63
Thursday 08 February 2024 19:42:50 +0000 (0:00:00.836) 0:00:57.027 *****

TASK [fedora.linux_system_roles.ha_cluster : Make sure qnetd is not installed] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:9
Thursday 08 February 2024 19:42:50 +0000 (0:00:00.038) 0:00:57.066 *****
changed: [sut] => { "changed": true, "rc": 0, "results": [ "Removed: corosync-qnetd-3.0.2-2.el8.x86_64" ] }

TASK [fedora.linux_system_roles.ha_cluster : Make sure qnetd config files are not present] ***
task path: /WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:16
Thursday 08 February 2024 19:42:53 +0000 (0:00:03.236) 0:01:00.302 *****
changed: [sut] => { "changed": true, "path": "/etc/corosync/qnetd", "state": "absent" }

to retry, use: --limit @/tmp/tests_qdevice_minimal.retry

PLAY RECAP *********************************************************************
sut : ok=76 changed=23 unreachable=0 failed=1 skipped=58 rescued=0 ignored=0

Thursday 08 February 2024 19:42:53 +0000 (0:00:00.207) 0:01:00.510 *****
===============================================================================
fedora.linux_system_roles.ha_cluster : Install qnetd packages ---------- 19.15s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup_qnetd.yml:9
fedora.linux_system_roles.ha_cluster : Install cluster packages --------- 7.48s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:44
fedora.linux_system_roles.ha_cluster : Make sure qnetd is not installed --- 3.24s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:9
fedora.linux_system_roles.ha_cluster : Make sure qnetd is not installed --- 2.56s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_cleanup_qnetd.yml:9
fedora.linux_system_roles.ha_cluster : Install role essential packages --- 2.47s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/main.yml:11
fedora.linux_system_roles.ha_cluster : Set up qnetd --------------------- 2.29s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/test_setup_qnetd.yml:17
fedora.linux_system_roles.ha_cluster : Create a corosync.conf file content using pcs-0.10 --- 1.75s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml:3
fedora.linux_system_roles.ha_cluster : Fetch pcs capabilities ----------- 1.73s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/check-and-prepare-role-variables.yml:110
fedora.linux_system_roles.ha_cluster : Populate service facts ----------- 1.62s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/selinux.yml:3
fedora.linux_system_roles.ha_cluster : Get services status - detect pacemaker --- 1.55s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:135
fedora.linux_system_roles.ha_cluster : Start pcsd with updated config files and configure it to start on boot --- 1.52s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:88
fedora.linux_system_roles.ha_cluster : Pcs auth using pcs-0.10 ---------- 1.18s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth-pcs-0.10.yml:3
fedora.linux_system_roles.ha_cluster : Set stonith-watchdog-timeout cluster property in CIB --- 0.84s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/sbd.yml:142
fedora.linux_system_roles.ha_cluster : Obtain and distribute qdevice certificates [CLI] --- 0.84s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-keys.yml:54
fedora.linux_system_roles.ha_cluster : Check pcs auth status ------------ 0.81s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-auth.yml:3
Gathering Facts --------------------------------------------------------- 0.77s
/WORKDIR/git-suseibi8cjjt/tests/tests_qdevice_minimal.yml:9 -------------------
fedora.linux_system_roles.ha_cluster : Add qdevice configuration to corosync.conf --- 0.75s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/pcs-cluster-setup-pcs-0.10.yml:80
fedora.linux_system_roles.ha_cluster : Distribute pcs_settings.conf ----- 0.61s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/configure-shell.yml:79
fedora.linux_system_roles.ha_cluster : Send fence_xvm.key to nodes ------ 0.54s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/distribute-fence-virt-key.yml:17
fedora.linux_system_roles.ha_cluster : Distribute corosync.conf file ---- 0.53s
/WORKDIR/git-suseibi8cjjt/.collection/ansible_collections/fedora/linux_system_roles/roles/ha_cluster/tasks/shell_pcs/cluster-setup-corosync.yml:26
---^---^---^---^---^---
# STDERR: ---v---v---v---v---v---
[DEPRECATION WARNING]: ANSIBLE_COLLECTIONS_PATHS option, does not fit var naming standard, use the singular form ANSIBLE_COLLECTIONS_PATH instead. This feature will be removed from ansible-core in version 2.19. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.
[DEPRECATION WARNING]: Encryption using the Python crypt module is deprecated.
The Python crypt module is deprecated and will be removed from Python 3.13. Install the passlib library for continued encryption functionality. This feature will be removed in version 2.17. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. ---^---^---^---^---^---
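Note: both STDERR deprecation warnings name their own remedy, and neither affects the test result. A hedged cleanup for the CI environment (the collection path value here is illustrative, taken from this run's configuration):

    # singular form replaces the deprecated ANSIBLE_COLLECTIONS_PATHS
    export ANSIBLE_COLLECTIONS_PATH=/WORKDIR/git-suseibi8cjjt/.collection
    # passlib replaces the deprecated Python crypt module for password hashing
    pip install passlib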