# STDOUT: ---v---v---v---v---v---
ansible-playbook 2.9.27
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/home/jenkins/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /opt/ansible-2.9/lib/python3.6/site-packages/ansible
  executable location = /opt/ansible-2.9/bin/ansible-playbook
  python version = 3.6.8 (default, Jan 25 2023, 15:03:30) [GCC 8.5.0 20210514 (Red Hat 8.5.0-18)]
Using /etc/ansible/ansible.cfg as config file
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.

PLAYBOOK: tests_lvm_pool_members.yml *******************************************
1 plays in /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml

PLAY [Test lvm pool members] ***************************************************

TASK [Gathering Facts] *********************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:2
Thursday 01 June 2023 00:34:39 +0000 (0:00:00.019) 0:00:00.019 *********
ok: [sut]
META: ran handlers

TASK [Run the role] ************************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:14
Thursday 01 June 2023 00:34:40 +0000 (0:00:00.819) 0:00:00.839 *********

TASK [linux-system-roles.storage : Set platform/version specific variables] ****
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2
Thursday 01 June 2023 00:34:40 +0000 (0:00:00.022) 0:00:00.862 *********
included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut

TASK [linux-system-roles.storage : Ensure ansible_facts used by role] **********
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2
Thursday 01 June 2023 00:34:40 +0000 (0:00:00.032) 0:00:00.894 *********
ok: [sut]

TASK [linux-system-roles.storage : Set platform/version specific variables] ****
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8
Thursday 01 June 2023 00:34:40 +0000 (0:00:00.344) 0:00:01.239 *********
skipping: [sut] => (item=RedHat.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [sut] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_packages": [
            {
                "packages": [
                    "vdo",
                    "kmod-vdo"
                ],
                "repository": "rhawalsh/dm-vdo"
            }
        ],
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap"
        ]
    },
    "ansible_included_var_files": [
        "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [sut] => (item=Fedora_36.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora_36.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [sut] => (item=Fedora_36.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora_36.yml",
    "skip_reason": "Conditional result was False"
}

TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5
Thursday 01 June 2023 00:34:40 +0000 (0:00:00.047) 0:00:01.286 *********
ok: [sut] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9
Thursday 01 June 2023 00:34:40 +0000 (0:00:00.012) 0:00:01.299 *********
ok: [sut] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [linux-system-roles.storage : Include the appropriate provider tasks] *****
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13
Thursday 01 June 2023 00:34:40 +0000 (0:00:00.012) 0:00:01.312 *********
included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut

TASK [linux-system-roles.storage : Make sure blivet is available] **************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2
Thursday 01 June 2023 00:34:40 +0000 (0:00:00.041) 0:00:01.353 *********
changed: [sut] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: python3-blivet-1:3.4.4-1.fc36.noarch",
        "Installed: python3-blockdev-2.28-2.fc36.x86_64",
        "Installed: python3-bytesize-2.7-1.fc36.x86_64",
        "Installed: device-mapper-event-1.02.175-7.fc36.x86_64",
        "Installed: libblockdev-btrfs-2.28-2.fc36.x86_64",
        "Installed: lzo-2.10-6.fc36.x86_64",
        "Installed: device-mapper-event-libs-1.02.175-7.fc36.x86_64",
        "Installed: sgpio-1.2.0.10-30.fc36.x86_64",
        "Installed: device-mapper-persistent-data-0.9.0-7.fc36.x86_64",
        "Installed: python3-pyparted-1:3.12.0-1.fc36.x86_64",
        "Installed: libblockdev-dm-2.28-2.fc36.x86_64",
        "Installed: lvm2-2.03.11-7.fc36.x86_64",
        "Installed: cxl-libs-76.1-1.fc36.x86_64",
        "Installed: lvm2-libs-2.03.11-7.fc36.x86_64",
        "Installed: libblockdev-kbd-2.28-2.fc36.x86_64",
        "Installed: blivet-data-1:3.4.4-1.fc36.noarch",
        "Installed: libblockdev-lvm-2.28-2.fc36.x86_64",
        "Installed: libblockdev-mpath-2.28-2.fc36.x86_64",
        "Installed: libblockdev-nvdimm-2.28-2.fc36.x86_64",
        "Installed: ndctl-76.1-1.fc36.x86_64",
        "Installed: lsof-4.94.0-3.fc36.x86_64",
        "Installed: device-mapper-multipath-0.8.7-9.fc36.x86_64",
        "Installed: bcache-tools-1.1-2.fc36.x86_64",
        "Installed: ndctl-libs-76.1-1.fc36.x86_64",
        "Installed: device-mapper-multipath-libs-0.8.7-9.fc36.x86_64",
        "Installed: daxctl-libs-76.1-1.fc36.x86_64",
        "Installed: btrfs-progs-6.2.2-1.fc36.x86_64",
        "Installed: dmraid-1.0.0.rc16-52.fc36.x86_64",
        "Installed: dmraid-events-1.0.0.rc16-52.fc36.x86_64",
        "Installed: dmraid-libs-1.0.0.rc16-52.fc36.x86_64",
        "Installed: libaio-0.3.111-13.fc36.x86_64",
        "Installed: iniparser-4.1-9.fc36.x86_64"
    ]
}

TASK [linux-system-roles.storage : Show storage_pools] *************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9
Thursday 01 June 2023 00:34:48 +0000 (0:00:07.806) 0:00:09.160 *********
ok: [sut] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [linux-system-roles.storage : Show storage_volumes] ***********************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14
Thursday 01 June 2023 00:34:48 +0000 (0:00:00.025) 0:00:09.186 *********
ok: [sut] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [linux-system-roles.storage : Get required packages] **********************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19
Thursday 01 June 2023 00:34:48 +0000 (0:00:00.026) 0:00:09.212 *********
ok: [sut] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [linux-system-roles.storage : Enable copr repositories if needed] *********
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32
Thursday 01 June 2023 00:34:49 +0000 (0:00:00.656) 0:00:09.868 *********
included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml for sut

TASK [linux-system-roles.storage : Check if the COPR support packages should be installed] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:2
Thursday 01 June 2023 00:34:49 +0000 (0:00:00.046) 0:00:09.915 *********
skipping: [sut] => (item={'repository': 'rhawalsh/dm-vdo', 'packages': ['vdo', 'kmod-vdo']}) => {
    "ansible_loop_var": "repo",
    "changed": false,
    "repo": {
        "packages": [
            "vdo",
            "kmod-vdo"
        ],
        "repository": "rhawalsh/dm-vdo"
    },
    "skip_reason": "Conditional result was False"
}

TASK [linux-system-roles.storage : Make sure COPR support packages are present] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:13
Thursday 01 June 2023 00:34:49 +0000 (0:00:00.027) 0:00:09.942 *********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [linux-system-roles.storage : Enable COPRs] *******************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/enable_coprs.yml:18
Thursday 01 June 2023 00:34:49 +0000 (0:00:00.016) 0:00:09.959 *********
skipping: [sut] => (item={'repository': 'rhawalsh/dm-vdo', 'packages': ['vdo', 'kmod-vdo']}) => {
    "ansible_loop_var": "repo",
    "changed": false,
    "repo": {
        "packages": [
            "vdo",
            "kmod-vdo"
        ],
        "repository": "rhawalsh/dm-vdo"
    },
    "skip_reason": "Conditional result was False"
}

TASK [linux-system-roles.storage : Make sure required packages are installed] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39
Thursday 01 June 2023 00:34:49 +0000 (0:00:00.023) 0:00:09.982 *********
ok: [sut] => {
"changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:34:51 +0000 (0:00:02.360) 0:00:12.343 ********* ok: [sut] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "arp-ethers.service": { "name": "arp-ethers.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "bluetooth.service": { "name": "bluetooth.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.bluez.service": { "name": "dbus-org.bluez.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": 
"dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.oom1.service": { "name": "dbus-org.freedesktop.oom1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.resolve1.service": { "name": "dbus-org.freedesktop.resolve1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dmraid-activation.service": { "name": "dmraid-activation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "fwupd-offline-update.service": { "name": "fwupd-offline-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd-refresh.service": { "name": "fwupd-refresh.service", "source": "systemd", "state": "inactive", "status": "static" }, "fwupd.service": { "name": "fwupd.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": 
"unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "lvm2-pvscan@.service": { "name": "lvm2-pvscan@.service", "source": "systemd", "state": "unknown", "status": "static" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", 
"source": "systemd", "state": "stopped", "status": "unknown" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-system-token.service": { "name": "systemd-boot-system-token.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-homed-activate.service": { "name": "systemd-homed-activate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-homed.service": { "name": "systemd-homed.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-networkd.service": { "name": "systemd-networkd.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "running", "status": "enabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-time-wait-sync.service": { "name": "systemd-time-wait-sync.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": 
"systemd", "state": "stopped", "status": "disabled" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-zram-setup@.service": { "name": "systemd-zram-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-zram-setup@zram0.service": { "name": "systemd-zram-setup@zram0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "udisks2.service": { "name": "udisks2.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:34:54 +0000 (0:00:02.240) 0:00:14.584 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* 
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.032) 0:00:14.617 *********

TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.016) 0:00:14.633 *********
ok: [sut] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.367) 0:00:15.001 *********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] *****
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.018) 0:00:15.019 *********

TASK [linux-system-roles.storage : Show blivet_output] *************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.015) 0:00:15.034 *********
ok: [sut] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [linux-system-roles.storage : Set the list of pools for test verification] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.020) 0:00:15.055 *********
ok: [sut] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [linux-system-roles.storage : Set the list of volumes for test verification] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.021) 0:00:15.076 *********
ok: [sut] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [linux-system-roles.storage : Remove obsolete mounts] *********************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.038) 0:00:15.114 *********

TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.025) 0:00:15.139 *********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [linux-system-roles.storage : Set up new/current mounts] ******************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.019) 0:00:15.158 *********

TASK [linux-system-roles.storage : Manage mount ownership/permissions] *********
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.016) 0:00:15.174 *********

TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.017) 0:00:15.191 *********
skipping: [sut] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182
Thursday 01 June 2023 00:34:54 +0000 (0:00:00.017) 0:00:15.208 *********
ok: [sut] => {
    "changed": false,
    "stat": {
        "atime": 1685579192.8006275,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1684244424.757,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 131081,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1684244183.529,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3816983141",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187
Thursday 01 June 2023 00:34:55 +0000 (0:00:00.259) 0:00:15.468 *********

TASK [linux-system-roles.storage : Update facts] *******************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209
Thursday 01 June 2023 00:34:55 +0000 (0:00:00.016) 0:00:15.484 *********
ok: [sut]

TASK [Mark tasks to be skipped] ************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:18
Thursday 01 June 2023 00:34:55 +0000 (0:00:00.613) 0:00:16.098 *********
ok: [sut] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "packages_installed",
            "service_facts"
        ]
    },
    "changed": false
}

TASK [Gather package facts] ****************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:25
Thursday 01 June 2023 00:34:55 +0000 (0:00:00.017) 0:00:16.115 *********
ok: [sut] => { "ansible_facts": { "packages": { "ModemManager-glib": [ { "arch": "x86_64", "epoch": null, "name": "ModemManager-glib", "release": "1.fc36", "source": "rpm", "version": "1.18.8" } ], "NetworkManager": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager", "release": "1.fc36", "source": "rpm", "version": "1.38.6" } ], "NetworkManager-libnm": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager-libnm", "release": "1.fc36", "source": "rpm", "version": "1.38.6" } ], "alternatives": [ { "arch": "x86_64", "epoch": null, "name": "alternatives", "release": "1.fc36", "source": "rpm", "version": "1.22" } ],
"amd-gpu-firmware": [ { "arch": "noarch", "epoch": null, "name": "amd-gpu-firmware", "release": "148.fc36", "source": "rpm", "version": "20230310" } ], "aspell": [ { "arch": "x86_64", "epoch": 12, "name": "aspell", "release": "9.fc36", "source": "rpm", "version": "0.60.8" } ], "aspell-en": [ { "arch": "x86_64", "epoch": 50, "name": "aspell-en", "release": "5.fc36", "source": "rpm", "version": "2020.12.07" } ], "audit": [ { "arch": "x86_64", "epoch": null, "name": "audit", "release": "1.fc36", "source": "rpm", "version": "3.1.1" } ], "audit-libs": [ { "arch": "x86_64", "epoch": null, "name": "audit-libs", "release": "1.fc36", "source": "rpm", "version": "3.1.1" } ], "authselect": [ { "arch": "x86_64", "epoch": null, "name": "authselect", "release": "1.fc36", "source": "rpm", "version": "1.4.0" } ], "authselect-libs": [ { "arch": "x86_64", "epoch": null, "name": "authselect-libs", "release": "1.fc36", "source": "rpm", "version": "1.4.0" } ], "avahi-libs": [ { "arch": "x86_64", "epoch": null, "name": "avahi-libs", "release": "16.fc36", "source": "rpm", "version": "0.8" } ], "basesystem": [ { "arch": "noarch", "epoch": null, "name": "basesystem", "release": "13.fc36", "source": "rpm", "version": "11" } ], "bash": [ { "arch": "x86_64", "epoch": null, "name": "bash", "release": "1.fc36", "source": "rpm", "version": "5.2.15" } ], "bc": [ { "arch": "x86_64", "epoch": null, "name": "bc", "release": "15.fc36", "source": "rpm", "version": "1.07.1" } ], "bcache-tools": [ { "arch": "x86_64", "epoch": null, "name": "bcache-tools", "release": "2.fc36", "source": "rpm", "version": "1.1" } ], "beakerlib": [ { "arch": "noarch", "epoch": null, "name": "beakerlib", "release": "1.fc36", "source": "rpm", "version": "1.29.3" } ], "beakerlib-redhat": [ { "arch": "noarch", "epoch": null, "name": "beakerlib-redhat", "release": "33.fc36eng", "source": "rpm", "version": "1" } ], "binutils": [ { "arch": "x86_64", "epoch": null, "name": "binutils", "release": "37.fc36", "source": "rpm", "version": "2.37" } ], "binutils-gold": [ { "arch": "x86_64", "epoch": null, "name": "binutils-gold", "release": "37.fc36", "source": "rpm", "version": "2.37" } ], "bison": [ { "arch": "x86_64", "epoch": null, "name": "bison", "release": "2.fc36", "source": "rpm", "version": "3.8.2" } ], "blivet-data": [ { "arch": "noarch", "epoch": 1, "name": "blivet-data", "release": "1.fc36", "source": "rpm", "version": "3.4.4" } ], "bluez": [ { "arch": "x86_64", "epoch": null, "name": "bluez", "release": "4.fc36", "source": "rpm", "version": "5.66" } ], "boost-filesystem": [ { "arch": "x86_64", "epoch": null, "name": "boost-filesystem", "release": "12.fc36", "source": "rpm", "version": "1.76.0" } ], "boost-system": [ { "arch": "x86_64", "epoch": null, "name": "boost-system", "release": "12.fc36", "source": "rpm", "version": "1.76.0" } ], "boost-thread": [ { "arch": "x86_64", "epoch": null, "name": "boost-thread", "release": "12.fc36", "source": "rpm", "version": "1.76.0" } ], "btrfs-progs": [ { "arch": "x86_64", "epoch": null, "name": "btrfs-progs", "release": "1.fc36", "source": "rpm", "version": "6.2.2" } ], "bzip2-libs": [ { "arch": "x86_64", "epoch": null, "name": "bzip2-libs", "release": "11.fc36", "source": "rpm", "version": "1.0.8" } ], "c-ares": [ { "arch": "x86_64", "epoch": null, "name": "c-ares", "release": "1.fc36", "source": "rpm", "version": "1.19.0" } ], "ca-certificates": [ { "arch": "noarch", "epoch": null, "name": "ca-certificates", "release": "1.0.fc36", "source": "rpm", "version": "2023.2.60" } ], "checkpolicy": [ { "arch": 
"x86_64", "epoch": null, "name": "checkpolicy", "release": "2.fc36", "source": "rpm", "version": "3.3" } ], "chkconfig": [ { "arch": "x86_64", "epoch": null, "name": "chkconfig", "release": "1.fc36", "source": "rpm", "version": "1.22" } ], "chrony": [ { "arch": "x86_64", "epoch": null, "name": "chrony", "release": "1.fc36", "source": "rpm", "version": "4.3" } ], "cloud-init": [ { "arch": "noarch", "epoch": null, "name": "cloud-init", "release": "3.fc36", "source": "rpm", "version": "22.1" } ], "cloud-utils-growpart": [ { "arch": "noarch", "epoch": null, "name": "cloud-utils-growpart", "release": "10.fc36", "source": "rpm", "version": "0.31" } ], "coreutils": [ { "arch": "x86_64", "epoch": null, "name": "coreutils", "release": "9.fc36", "source": "rpm", "version": "9.0" } ], "coreutils-common": [ { "arch": "x86_64", "epoch": null, "name": "coreutils-common", "release": "9.fc36", "source": "rpm", "version": "9.0" } ], "cpio": [ { "arch": "x86_64", "epoch": null, "name": "cpio", "release": "12.fc36", "source": "rpm", "version": "2.13" } ], "cpp": [ { "arch": "x86_64", "epoch": null, "name": "cpp", "release": "4.fc36", "source": "rpm", "version": "12.2.1" } ], "cracklib": [ { "arch": "x86_64", "epoch": null, "name": "cracklib", "release": "28.fc36", "source": "rpm", "version": "2.9.6" } ], "cracklib-dicts": [ { "arch": "x86_64", "epoch": null, "name": "cracklib-dicts", "release": "28.fc36", "source": "rpm", "version": "2.9.6" } ], "createrepo_c": [ { "arch": "x86_64", "epoch": null, "name": "createrepo_c", "release": "1.fc36", "source": "rpm", "version": "0.21.1" } ], "createrepo_c-libs": [ { "arch": "x86_64", "epoch": null, "name": "createrepo_c-libs", "release": "1.fc36", "source": "rpm", "version": "0.21.1" } ], "crypto-policies": [ { "arch": "noarch", "epoch": null, "name": "crypto-policies", "release": "1.gitdfb10ea.fc36", "source": "rpm", "version": "20220428" } ], "crypto-policies-scripts": [ { "arch": "noarch", "epoch": null, "name": "crypto-policies-scripts", "release": "1.gitdfb10ea.fc36", "source": "rpm", "version": "20220428" } ], "cryptsetup-libs": [ { "arch": "x86_64", "epoch": null, "name": "cryptsetup-libs", "release": "2.fc36", "source": "rpm", "version": "2.4.3" } ], "curl": [ { "arch": "x86_64", "epoch": null, "name": "curl", "release": "14.fc36", "source": "rpm", "version": "7.82.0" } ], "cxl-libs": [ { "arch": "x86_64", "epoch": null, "name": "cxl-libs", "release": "1.fc36", "source": "rpm", "version": "76.1" } ], "cyrus-sasl-lib": [ { "arch": "x86_64", "epoch": null, "name": "cyrus-sasl-lib", "release": "18.fc36", "source": "rpm", "version": "2.1.27" } ], "daxctl-libs": [ { "arch": "x86_64", "epoch": null, "name": "daxctl-libs", "release": "1.fc36", "source": "rpm", "version": "76.1" } ], "dbus": [ { "arch": "x86_64", "epoch": 1, "name": "dbus", "release": "1.fc36", "source": "rpm", "version": "1.14.4" } ], "dbus-broker": [ { "arch": "x86_64", "epoch": null, "name": "dbus-broker", "release": "1.fc36", "source": "rpm", "version": "33" } ], "dbus-common": [ { "arch": "noarch", "epoch": 1, "name": "dbus-common", "release": "1.fc36", "source": "rpm", "version": "1.14.4" } ], "dbus-libs": [ { "arch": "x86_64", "epoch": 1, "name": "dbus-libs", "release": "1.fc36", "source": "rpm", "version": "1.14.4" } ], "deltarpm": [ { "arch": "x86_64", "epoch": null, "name": "deltarpm", "release": "11.fc36", "source": "rpm", "version": "3.6.2" } ], "device-mapper": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper", "release": "7.fc36", "source": "rpm", "version": "1.02.175" } ], 
"device-mapper-event": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-event", "release": "7.fc36", "source": "rpm", "version": "1.02.175" } ], "device-mapper-event-libs": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-event-libs", "release": "7.fc36", "source": "rpm", "version": "1.02.175" } ], "device-mapper-libs": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-libs", "release": "7.fc36", "source": "rpm", "version": "1.02.175" } ], "device-mapper-multipath": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-multipath", "release": "9.fc36", "source": "rpm", "version": "0.8.7" } ], "device-mapper-multipath-libs": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-multipath-libs", "release": "9.fc36", "source": "rpm", "version": "0.8.7" } ], "device-mapper-persistent-data": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-persistent-data", "release": "7.fc36", "source": "rpm", "version": "0.9.0" } ], "dhcp-client": [ { "arch": "x86_64", "epoch": 12, "name": "dhcp-client", "release": "4.P1.fc36", "source": "rpm", "version": "4.4.3" } ], "dhcp-common": [ { "arch": "noarch", "epoch": 12, "name": "dhcp-common", "release": "4.P1.fc36", "source": "rpm", "version": "4.4.3" } ], "diffutils": [ { "arch": "x86_64", "epoch": null, "name": "diffutils", "release": "2.fc36", "source": "rpm", "version": "3.8" } ], "dmraid": [ { "arch": "x86_64", "epoch": null, "name": "dmraid", "release": "52.fc36", "source": "rpm", "version": "1.0.0.rc16" } ], "dmraid-events": [ { "arch": "x86_64", "epoch": null, "name": "dmraid-events", "release": "52.fc36", "source": "rpm", "version": "1.0.0.rc16" } ], "dmraid-libs": [ { "arch": "x86_64", "epoch": null, "name": "dmraid-libs", "release": "52.fc36", "source": "rpm", "version": "1.0.0.rc16" } ], "dnf": [ { "arch": "noarch", "epoch": null, "name": "dnf", "release": "1.fc36", "source": "rpm", "version": "4.15.0" } ], "dnf-data": [ { "arch": "noarch", "epoch": null, "name": "dnf-data", "release": "1.fc36", "source": "rpm", "version": "4.15.0" } ], "dnf-plugins-core": [ { "arch": "noarch", "epoch": null, "name": "dnf-plugins-core", "release": "1.fc36", "source": "rpm", "version": "4.4.0" } ], "dnf-utils": [ { "arch": "noarch", "epoch": null, "name": "dnf-utils", "release": "1.fc36", "source": "rpm", "version": "4.4.0" } ], "dosfstools": [ { "arch": "x86_64", "epoch": null, "name": "dosfstools", "release": "3.fc36", "source": "rpm", "version": "4.2" } ], "dracut": [ { "arch": "x86_64", "epoch": null, "name": "dracut", "release": "3.fc36", "source": "rpm", "version": "057" } ], "dracut-config-rescue": [ { "arch": "x86_64", "epoch": null, "name": "dracut-config-rescue", "release": "3.fc36", "source": "rpm", "version": "057" } ], "drpm": [ { "arch": "x86_64", "epoch": null, "name": "drpm", "release": "1.fc36", "source": "rpm", "version": "0.5.1" } ], "dyninst": [ { "arch": "x86_64", "epoch": null, "name": "dyninst", "release": "3.fc36", "source": "rpm", "version": "12.0.1" } ], "e2fsprogs": [ { "arch": "x86_64", "epoch": null, "name": "e2fsprogs", "release": "2.fc36", "source": "rpm", "version": "1.46.5" } ], "e2fsprogs-libs": [ { "arch": "x86_64", "epoch": null, "name": "e2fsprogs-libs", "release": "2.fc36", "source": "rpm", "version": "1.46.5" } ], "efivar-libs": [ { "arch": "x86_64", "epoch": null, "name": "efivar-libs", "release": "6.fc36", "source": "rpm", "version": "38" } ], "elfutils-debuginfod-client": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-debuginfod-client", "release": "2.fc36", 
"source": "rpm", "version": "0.189" } ], "elfutils-debuginfod-client-devel": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-debuginfod-client-devel", "release": "2.fc36", "source": "rpm", "version": "0.189" } ], "elfutils-default-yama-scope": [ { "arch": "noarch", "epoch": null, "name": "elfutils-default-yama-scope", "release": "2.fc36", "source": "rpm", "version": "0.189" } ], "elfutils-devel": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-devel", "release": "2.fc36", "source": "rpm", "version": "0.189" } ], "elfutils-libelf": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libelf", "release": "2.fc36", "source": "rpm", "version": "0.189" } ], "elfutils-libelf-devel": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libelf-devel", "release": "2.fc36", "source": "rpm", "version": "0.189" } ], "elfutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libs", "release": "2.fc36", "source": "rpm", "version": "0.189" } ], "expat": [ { "arch": "x86_64", "epoch": null, "name": "expat", "release": "1.fc36", "source": "rpm", "version": "2.5.0" } ], "fedora-gpg-keys": [ { "arch": "noarch", "epoch": null, "name": "fedora-gpg-keys", "release": "5", "source": "rpm", "version": "36" } ], "fedora-release": [ { "arch": "noarch", "epoch": null, "name": "fedora-release", "release": "21", "source": "rpm", "version": "36" } ], "fedora-release-common": [ { "arch": "noarch", "epoch": null, "name": "fedora-release-common", "release": "21", "source": "rpm", "version": "36" } ], "fedora-release-identity-basic": [ { "arch": "noarch", "epoch": null, "name": "fedora-release-identity-basic", "release": "21", "source": "rpm", "version": "36" } ], "fedora-repos": [ { "arch": "noarch", "epoch": null, "name": "fedora-repos", "release": "5", "source": "rpm", "version": "36" } ], "fedora-repos-modular": [ { "arch": "noarch", "epoch": null, "name": "fedora-repos-modular", "release": "5", "source": "rpm", "version": "36" } ], "file": [ { "arch": "x86_64", "epoch": null, "name": "file", "release": "4.fc36", "source": "rpm", "version": "5.41" } ], "file-libs": [ { "arch": "x86_64", "epoch": null, "name": "file-libs", "release": "4.fc36", "source": "rpm", "version": "5.41" } ], "filesystem": [ { "arch": "x86_64", "epoch": null, "name": "filesystem", "release": "2.fc36", "source": "rpm", "version": "3.18" } ], "findutils": [ { "arch": "x86_64", "epoch": 1, "name": "findutils", "release": "1.fc36", "source": "rpm", "version": "4.9.0" } ], "firewalld": [ { "arch": "noarch", "epoch": null, "name": "firewalld", "release": "1.fc36", "source": "rpm", "version": "1.2.5" } ], "firewalld-filesystem": [ { "arch": "noarch", "epoch": null, "name": "firewalld-filesystem", "release": "1.fc36", "source": "rpm", "version": "1.2.5" } ], "flex": [ { "arch": "x86_64", "epoch": null, "name": "flex", "release": "10.fc36", "source": "rpm", "version": "2.6.4" } ], "fonts-filesystem": [ { "arch": "noarch", "epoch": 1, "name": "fonts-filesystem", "release": "7.fc36", "source": "rpm", "version": "2.0.5" } ], "fuse-libs": [ { "arch": "x86_64", "epoch": null, "name": "fuse-libs", "release": "14.fc36", "source": "rpm", "version": "2.9.9" } ], "fwupd": [ { "arch": "x86_64", "epoch": null, "name": "fwupd", "release": "1.fc36", "source": "rpm", "version": "1.8.6" } ], "fwupd-efi": [ { "arch": "x86_64", "epoch": null, "name": "fwupd-efi", "release": "1.fc36", "source": "rpm", "version": "1.4" } ], "fwupd-plugin-modem-manager": [ { "arch": "x86_64", "epoch": null, "name": "fwupd-plugin-modem-manager", "release": 
"1.fc36", "source": "rpm", "version": "1.8.6" } ], "fwupd-plugin-uefi-capsule-data": [ { "arch": "x86_64", "epoch": null, "name": "fwupd-plugin-uefi-capsule-data", "release": "1.fc36", "source": "rpm", "version": "1.8.6" } ], "gawk": [ { "arch": "x86_64", "epoch": null, "name": "gawk", "release": "2.fc36", "source": "rpm", "version": "5.1.1" } ], "gc": [ { "arch": "x86_64", "epoch": null, "name": "gc", "release": "3.fc36", "source": "rpm", "version": "8.0.6" } ], "gcc": [ { "arch": "x86_64", "epoch": null, "name": "gcc", "release": "4.fc36", "source": "rpm", "version": "12.2.1" } ], "gdbm-libs": [ { "arch": "x86_64", "epoch": 1, "name": "gdbm-libs", "release": "2.fc36", "source": "rpm", "version": "1.22" } ], "gdisk": [ { "arch": "x86_64", "epoch": null, "name": "gdisk", "release": "2.fc36", "source": "rpm", "version": "1.0.9" } ], "gettext": [ { "arch": "x86_64", "epoch": null, "name": "gettext", "release": "9.fc36", "source": "rpm", "version": "0.21" } ], "gettext-libs": [ { "arch": "x86_64", "epoch": null, "name": "gettext-libs", "release": "9.fc36", "source": "rpm", "version": "0.21" } ], "git": [ { "arch": "x86_64", "epoch": null, "name": "git", "release": "1.fc36", "source": "rpm", "version": "2.40.1" } ], "git-core": [ { "arch": "x86_64", "epoch": null, "name": "git-core", "release": "1.fc36", "source": "rpm", "version": "2.40.1" } ], "git-core-doc": [ { "arch": "noarch", "epoch": null, "name": "git-core-doc", "release": "1.fc36", "source": "rpm", "version": "2.40.1" } ], "glib2": [ { "arch": "x86_64", "epoch": null, "name": "glib2", "release": "1.fc36", "source": "rpm", "version": "2.72.3" } ], "glibc": [ { "arch": "x86_64", "epoch": null, "name": "glibc", "release": "22.fc36", "source": "rpm", "version": "2.35" } ], "glibc-common": [ { "arch": "x86_64", "epoch": null, "name": "glibc-common", "release": "22.fc36", "source": "rpm", "version": "2.35" } ], "glibc-devel": [ { "arch": "x86_64", "epoch": null, "name": "glibc-devel", "release": "22.fc36", "source": "rpm", "version": "2.35" } ], "glibc-gconv-extra": [ { "arch": "x86_64", "epoch": null, "name": "glibc-gconv-extra", "release": "22.fc36", "source": "rpm", "version": "2.35" } ], "glibc-headers-x86": [ { "arch": "noarch", "epoch": null, "name": "glibc-headers-x86", "release": "22.fc36", "source": "rpm", "version": "2.35" } ], "glibc-langpack-en": [ { "arch": "x86_64", "epoch": null, "name": "glibc-langpack-en", "release": "22.fc36", "source": "rpm", "version": "2.35" } ], "gmp": [ { "arch": "x86_64", "epoch": 1, "name": "gmp", "release": "2.fc36", "source": "rpm", "version": "6.2.1" } ], "gnupg2": [ { "arch": "x86_64", "epoch": null, "name": "gnupg2", "release": "3.fc36", "source": "rpm", "version": "2.3.7" } ], "gnupg2-smime": [ { "arch": "x86_64", "epoch": null, "name": "gnupg2-smime", "release": "3.fc36", "source": "rpm", "version": "2.3.7" } ], "gnutls": [ { "arch": "x86_64", "epoch": null, "name": "gnutls", "release": "2.fc36", "source": "rpm", "version": "3.8.0" } ], "gobject-introspection": [ { "arch": "x86_64", "epoch": null, "name": "gobject-introspection", "release": "1.fc36", "source": "rpm", "version": "1.72.1" } ], "google-noto-fonts-common": [ { "arch": "noarch", "epoch": null, "name": "google-noto-fonts-common", "release": "10.fc36", "source": "rpm", "version": "20201206" } ], "google-noto-sans-mono-vf-fonts": [ { "arch": "noarch", "epoch": null, "name": "google-noto-sans-mono-vf-fonts", "release": "10.fc36", "source": "rpm", "version": "20201206" } ], "google-noto-sans-vf-fonts": [ { "arch": "noarch", "epoch": 
null, "name": "google-noto-sans-vf-fonts", "release": "10.fc36", "source": "rpm", "version": "20201206" } ], "google-noto-serif-vf-fonts": [ { "arch": "noarch", "epoch": null, "name": "google-noto-serif-vf-fonts", "release": "10.fc36", "source": "rpm", "version": "20201206" } ], "gpg-pubkey": [ { "arch": null, "epoch": null, "name": "gpg-pubkey", "release": "60242b08", "source": "rpm", "version": "38ab71f4" } ], "gpgme": [ { "arch": "x86_64", "epoch": null, "name": "gpgme", "release": "4.fc36", "source": "rpm", "version": "1.17.0" } ], "gpm-libs": [ { "arch": "x86_64", "epoch": null, "name": "gpm-libs", "release": "40.fc36", "source": "rpm", "version": "1.20.7" } ], "grep": [ { "arch": "x86_64", "epoch": null, "name": "grep", "release": "2.fc36", "source": "rpm", "version": "3.7" } ], "groff-base": [ { "arch": "x86_64", "epoch": null, "name": "groff-base", "release": "9.fc36", "source": "rpm", "version": "1.22.4" } ], "grub2-common": [ { "arch": "noarch", "epoch": 1, "name": "grub2-common", "release": "62.fc36", "source": "rpm", "version": "2.06" } ], "grub2-pc": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-pc", "release": "62.fc36", "source": "rpm", "version": "2.06" } ], "grub2-pc-modules": [ { "arch": "noarch", "epoch": 1, "name": "grub2-pc-modules", "release": "62.fc36", "source": "rpm", "version": "2.06" } ], "grub2-tools": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools", "release": "62.fc36", "source": "rpm", "version": "2.06" } ], "grub2-tools-minimal": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools-minimal", "release": "62.fc36", "source": "rpm", "version": "2.06" } ], "grubby": [ { "arch": "x86_64", "epoch": null, "name": "grubby", "release": "67.fc36", "source": "rpm", "version": "8.40" } ], "gssproxy": [ { "arch": "x86_64", "epoch": null, "name": "gssproxy", "release": "7.fc36", "source": "rpm", "version": "0.8.4" } ], "guile22": [ { "arch": "x86_64", "epoch": null, "name": "guile22", "release": "5.fc36", "source": "rpm", "version": "2.2.7" } ], "gzip": [ { "arch": "x86_64", "epoch": null, "name": "gzip", "release": "3.fc36", "source": "rpm", "version": "1.11" } ], "hostname": [ { "arch": "x86_64", "epoch": null, "name": "hostname", "release": "6.fc36", "source": "rpm", "version": "3.23" } ], "hunspell": [ { "arch": "x86_64", "epoch": null, "name": "hunspell", "release": "19.fc36", "source": "rpm", "version": "1.7.0" } ], "hunspell-en": [ { "arch": "noarch", "epoch": null, "name": "hunspell-en", "release": "22.fc36", "source": "rpm", "version": "0.20140811.1" } ], "hunspell-en-GB": [ { "arch": "noarch", "epoch": null, "name": "hunspell-en-GB", "release": "22.fc36", "source": "rpm", "version": "0.20140811.1" } ], "hunspell-en-US": [ { "arch": "noarch", "epoch": null, "name": "hunspell-en-US", "release": "22.fc36", "source": "rpm", "version": "0.20140811.1" } ], "hunspell-filesystem": [ { "arch": "x86_64", "epoch": null, "name": "hunspell-filesystem", "release": "19.fc36", "source": "rpm", "version": "1.7.0" } ], "ima-evm-utils": [ { "arch": "x86_64", "epoch": null, "name": "ima-evm-utils", "release": "5.fc36", "source": "rpm", "version": "1.4" } ], "inih": [ { "arch": "x86_64", "epoch": null, "name": "inih", "release": "1.fc36", "source": "rpm", "version": "56" } ], "iniparser": [ { "arch": "x86_64", "epoch": null, "name": "iniparser", "release": "9.fc36", "source": "rpm", "version": "4.1" } ], "initscripts": [ { "arch": "x86_64", "epoch": null, "name": "initscripts", "release": "3.fc36", "source": "rpm", "version": "10.16" } ], "initscripts-rename-device": [ 
{ "arch": "x86_64", "epoch": null, "name": "initscripts-rename-device", "release": "3.fc36", "source": "rpm", "version": "10.16" } ], "initscripts-service": [ { "arch": "noarch", "epoch": null, "name": "initscripts-service", "release": "3.fc36", "source": "rpm", "version": "10.16" } ], "intel-gpu-firmware": [ { "arch": "noarch", "epoch": null, "name": "intel-gpu-firmware", "release": "148.fc36", "source": "rpm", "version": "20230310" } ], "ipcalc": [ { "arch": "x86_64", "epoch": null, "name": "ipcalc", "release": "3.fc36", "source": "rpm", "version": "1.0.1" } ], "iproute": [ { "arch": "x86_64", "epoch": null, "name": "iproute", "release": "2.fc36", "source": "rpm", "version": "5.15.0" } ], "ipset": [ { "arch": "x86_64", "epoch": null, "name": "ipset", "release": "5.fc36.1", "source": "rpm", "version": "7.15" } ], "ipset-libs": [ { "arch": "x86_64", "epoch": null, "name": "ipset-libs", "release": "5.fc36.1", "source": "rpm", "version": "7.15" } ], "iptables-libs": [ { "arch": "x86_64", "epoch": null, "name": "iptables-libs", "release": "15.fc36", "source": "rpm", "version": "1.8.7" } ], "iptables-nft": [ { "arch": "x86_64", "epoch": null, "name": "iptables-nft", "release": "15.fc36", "source": "rpm", "version": "1.8.7" } ], "iputils": [ { "arch": "x86_64", "epoch": null, "name": "iputils", "release": "1.fc36", "source": "rpm", "version": "20221126" } ], "jansson": [ { "arch": "x86_64", "epoch": null, "name": "jansson", "release": "4.fc36", "source": "rpm", "version": "2.13.1" } ], "jitterentropy": [ { "arch": "x86_64", "epoch": null, "name": "jitterentropy", "release": "3.fc36", "source": "rpm", "version": "3.4.1" } ], "json-c": [ { "arch": "x86_64", "epoch": null, "name": "json-c", "release": "3.fc36", "source": "rpm", "version": "0.15" } ], "json-glib": [ { "arch": "x86_64", "epoch": null, "name": "json-glib", "release": "2.fc36", "source": "rpm", "version": "1.6.6" } ], "kbd": [ { "arch": "x86_64", "epoch": null, "name": "kbd", "release": "9.fc36", "source": "rpm", "version": "2.4.0" } ], "kbd-misc": [ { "arch": "noarch", "epoch": null, "name": "kbd-misc", "release": "9.fc36", "source": "rpm", "version": "2.4.0" } ], "kernel": [ { "arch": "x86_64", "epoch": null, "name": "kernel", "release": "100.fc36", "source": "rpm", "version": "6.2.15" } ], "kernel-core": [ { "arch": "x86_64", "epoch": null, "name": "kernel-core", "release": "100.fc36", "source": "rpm", "version": "6.2.15" } ], "kernel-devel": [ { "arch": "x86_64", "epoch": null, "name": "kernel-devel", "release": "100.fc36", "source": "rpm", "version": "6.2.15" } ], "kernel-headers": [ { "arch": "x86_64", "epoch": null, "name": "kernel-headers", "release": "100.fc36", "source": "rpm", "version": "6.2.6" } ], "kernel-modules": [ { "arch": "x86_64", "epoch": null, "name": "kernel-modules", "release": "100.fc36", "source": "rpm", "version": "6.2.15" } ], "kernel-modules-core": [ { "arch": "x86_64", "epoch": null, "name": "kernel-modules-core", "release": "100.fc36", "source": "rpm", "version": "6.2.15" } ], "keyutils": [ { "arch": "x86_64", "epoch": null, "name": "keyutils", "release": "4.fc36", "source": "rpm", "version": "1.6.1" } ], "keyutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "keyutils-libs", "release": "4.fc36", "source": "rpm", "version": "1.6.1" } ], "kmod": [ { "arch": "x86_64", "epoch": null, "name": "kmod", "release": "7.fc36", "source": "rpm", "version": "29" } ], "kmod-libs": [ { "arch": "x86_64", "epoch": null, "name": "kmod-libs", "release": "7.fc36", "source": "rpm", "version": "29" } ], "kpartx": [ { 
"arch": "x86_64", "epoch": null, "name": "kpartx", "release": "9.fc36", "source": "rpm", "version": "0.8.7" } ], "krb5-libs": [ { "arch": "x86_64", "epoch": null, "name": "krb5-libs", "release": "12.fc36", "source": "rpm", "version": "1.19.2" } ], "langpacks-core-en": [ { "arch": "noarch", "epoch": null, "name": "langpacks-core-en", "release": "25.fc36", "source": "rpm", "version": "3.0" } ], "langpacks-core-font-en": [ { "arch": "noarch", "epoch": null, "name": "langpacks-core-font-en", "release": "25.fc36", "source": "rpm", "version": "3.0" } ], "langpacks-en": [ { "arch": "noarch", "epoch": null, "name": "langpacks-en", "release": "25.fc36", "source": "rpm", "version": "3.0" } ], "less": [ { "arch": "x86_64", "epoch": null, "name": "less", "release": "5.fc36", "source": "rpm", "version": "590" } ], "libacl": [ { "arch": "x86_64", "epoch": null, "name": "libacl", "release": "3.fc36", "source": "rpm", "version": "2.3.1" } ], "libaio": [ { "arch": "x86_64", "epoch": null, "name": "libaio", "release": "13.fc36", "source": "rpm", "version": "0.3.111" } ], "libarchive": [ { "arch": "x86_64", "epoch": null, "name": "libarchive", "release": "3.fc36", "source": "rpm", "version": "3.5.3" } ], "libargon2": [ { "arch": "x86_64", "epoch": null, "name": "libargon2", "release": "9.fc36", "source": "rpm", "version": "20171227" } ], "libassuan": [ { "arch": "x86_64", "epoch": null, "name": "libassuan", "release": "4.fc36", "source": "rpm", "version": "2.5.5" } ], "libatasmart": [ { "arch": "x86_64", "epoch": null, "name": "libatasmart", "release": "22.fc36", "source": "rpm", "version": "0.19" } ], "libattr": [ { "arch": "x86_64", "epoch": null, "name": "libattr", "release": "4.fc36", "source": "rpm", "version": "2.5.1" } ], "libbasicobjects": [ { "arch": "x86_64", "epoch": null, "name": "libbasicobjects", "release": "50.fc36", "source": "rpm", "version": "0.1.1" } ], "libblkid": [ { "arch": "x86_64", "epoch": null, "name": "libblkid", "release": "1.fc36", "source": "rpm", "version": "2.38" } ], "libblockdev": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-btrfs": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-btrfs", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-crypto": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-crypto", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-dm": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-dm", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-fs": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-fs", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-kbd": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-kbd", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-loop": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-loop", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-lvm": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-lvm", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-mdraid": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-mdraid", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-mpath": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-mpath", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-nvdimm": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-nvdimm", 
"release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-part": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-part", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-swap": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-swap", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libblockdev-utils": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-utils", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "libbpf": [ { "arch": "x86_64", "epoch": 2, "name": "libbpf", "release": "3.fc36", "source": "rpm", "version": "0.7.0" } ], "libbrotli": [ { "arch": "x86_64", "epoch": null, "name": "libbrotli", "release": "7.fc36", "source": "rpm", "version": "1.0.9" } ], "libbytesize": [ { "arch": "x86_64", "epoch": null, "name": "libbytesize", "release": "1.fc36", "source": "rpm", "version": "2.7" } ], "libcap": [ { "arch": "x86_64", "epoch": null, "name": "libcap", "release": "4.fc36", "source": "rpm", "version": "2.48" } ], "libcap-ng": [ { "arch": "x86_64", "epoch": null, "name": "libcap-ng", "release": "1.fc36", "source": "rpm", "version": "0.8.3" } ], "libcap-ng-python3": [ { "arch": "x86_64", "epoch": null, "name": "libcap-ng-python3", "release": "1.fc36", "source": "rpm", "version": "0.8.3" } ], "libcbor": [ { "arch": "x86_64", "epoch": null, "name": "libcbor", "release": "5.fc36", "source": "rpm", "version": "0.7.0" } ], "libcollection": [ { "arch": "x86_64", "epoch": null, "name": "libcollection", "release": "50.fc36", "source": "rpm", "version": "0.7.0" } ], "libcom_err": [ { "arch": "x86_64", "epoch": null, "name": "libcom_err", "release": "2.fc36", "source": "rpm", "version": "1.46.5" } ], "libcomps": [ { "arch": "x86_64", "epoch": null, "name": "libcomps", "release": "2.fc36", "source": "rpm", "version": "0.1.18" } ], "libcurl": [ { "arch": "x86_64", "epoch": null, "name": "libcurl", "release": "14.fc36", "source": "rpm", "version": "7.82.0" } ], "libdb": [ { "arch": "x86_64", "epoch": null, "name": "libdb", "release": "51.fc36", "source": "rpm", "version": "5.3.28" } ], "libdhash": [ { "arch": "x86_64", "epoch": null, "name": "libdhash", "release": "50.fc36", "source": "rpm", "version": "0.5.0" } ], "libdnf": [ { "arch": "x86_64", "epoch": null, "name": "libdnf", "release": "1.fc36", "source": "rpm", "version": "0.70.0" } ], "libeconf": [ { "arch": "x86_64", "epoch": null, "name": "libeconf", "release": "3.fc36", "source": "rpm", "version": "0.4.0" } ], "libedit": [ { "arch": "x86_64", "epoch": null, "name": "libedit", "release": "41.20210910cvs.fc36", "source": "rpm", "version": "3.1" } ], "libev": [ { "arch": "x86_64", "epoch": null, "name": "libev", "release": "5.fc36", "source": "rpm", "version": "4.33" } ], "libevent": [ { "arch": "x86_64", "epoch": null, "name": "libevent", "release": "6.fc36", "source": "rpm", "version": "2.1.12" } ], "libfdisk": [ { "arch": "x86_64", "epoch": null, "name": "libfdisk", "release": "1.fc36", "source": "rpm", "version": "2.38" } ], "libffi": [ { "arch": "x86_64", "epoch": null, "name": "libffi", "release": "8.fc36", "source": "rpm", "version": "3.4.2" } ], "libfido2": [ { "arch": "x86_64", "epoch": null, "name": "libfido2", "release": "5.fc36", "source": "rpm", "version": "1.10.0" } ], "libfsverity": [ { "arch": "x86_64", "epoch": null, "name": "libfsverity", "release": "7.fc36", "source": "rpm", "version": "1.4" } ], "libgcab1": [ { "arch": "x86_64", "epoch": null, "name": "libgcab1", "release": "6.fc36", "source": "rpm", "version": "1.4" } ], "libgcc": [ { 
"arch": "x86_64", "epoch": null, "name": "libgcc", "release": "4.fc36", "source": "rpm", "version": "12.2.1" } ], "libgcrypt": [ { "arch": "x86_64", "epoch": null, "name": "libgcrypt", "release": "3.fc36", "source": "rpm", "version": "1.10.1" } ], "libgomp": [ { "arch": "x86_64", "epoch": null, "name": "libgomp", "release": "4.fc36", "source": "rpm", "version": "12.2.1" } ], "libgpg-error": [ { "arch": "x86_64", "epoch": null, "name": "libgpg-error", "release": "1.fc36", "source": "rpm", "version": "1.45" } ], "libgudev": [ { "arch": "x86_64", "epoch": null, "name": "libgudev", "release": "2.fc36", "source": "rpm", "version": "237" } ], "libgusb": [ { "arch": "x86_64", "epoch": null, "name": "libgusb", "release": "2.fc36", "source": "rpm", "version": "0.3.10" } ], "libibverbs": [ { "arch": "x86_64", "epoch": null, "name": "libibverbs", "release": "1.fc36", "source": "rpm", "version": "39.0" } ], "libicu": [ { "arch": "x86_64", "epoch": null, "name": "libicu", "release": "6.fc36", "source": "rpm", "version": "69.1" } ], "libidn2": [ { "arch": "x86_64", "epoch": null, "name": "libidn2", "release": "1.fc36", "source": "rpm", "version": "2.3.4" } ], "libini_config": [ { "arch": "x86_64", "epoch": null, "name": "libini_config", "release": "50.fc36", "source": "rpm", "version": "1.3.1" } ], "libjcat": [ { "arch": "x86_64", "epoch": null, "name": "libjcat", "release": "1.fc36", "source": "rpm", "version": "0.1.12" } ], "libkcapi": [ { "arch": "x86_64", "epoch": null, "name": "libkcapi", "release": "2.fc36", "source": "rpm", "version": "1.4.0" } ], "libkcapi-hmaccalc": [ { "arch": "x86_64", "epoch": null, "name": "libkcapi-hmaccalc", "release": "2.fc36", "source": "rpm", "version": "1.4.0" } ], "libksba": [ { "arch": "x86_64", "epoch": null, "name": "libksba", "release": "1.fc36", "source": "rpm", "version": "1.6.3" } ], "libldb": [ { "arch": "x86_64", "epoch": null, "name": "libldb", "release": "1.fc36", "source": "rpm", "version": "2.5.3" } ], "libmaxminddb": [ { "arch": "x86_64", "epoch": null, "name": "libmaxminddb", "release": "1.fc36", "source": "rpm", "version": "1.7.1" } ], "libmbim": [ { "arch": "x86_64", "epoch": null, "name": "libmbim", "release": "1.fc36", "source": "rpm", "version": "1.26.4" } ], "libmetalink": [ { "arch": "x86_64", "epoch": null, "name": "libmetalink", "release": "25.fc36", "source": "rpm", "version": "0.1.3" } ], "libmnl": [ { "arch": "x86_64", "epoch": null, "name": "libmnl", "release": "15.fc36", "source": "rpm", "version": "1.0.4" } ], "libmodulemd": [ { "arch": "x86_64", "epoch": null, "name": "libmodulemd", "release": "2.fc36", "source": "rpm", "version": "2.14.0" } ], "libmount": [ { "arch": "x86_64", "epoch": null, "name": "libmount", "release": "1.fc36", "source": "rpm", "version": "2.38" } ], "libmpc": [ { "arch": "x86_64", "epoch": null, "name": "libmpc", "release": "4.fc36", "source": "rpm", "version": "1.2.1" } ], "libndp": [ { "arch": "x86_64", "epoch": null, "name": "libndp", "release": "3.fc36", "source": "rpm", "version": "1.8" } ], "libnetfilter_conntrack": [ { "arch": "x86_64", "epoch": null, "name": "libnetfilter_conntrack", "release": "4.fc36", "source": "rpm", "version": "1.0.8" } ], "libnfnetlink": [ { "arch": "x86_64", "epoch": null, "name": "libnfnetlink", "release": "21.fc36", "source": "rpm", "version": "1.0.1" } ], "libnfsidmap": [ { "arch": "x86_64", "epoch": 1, "name": "libnfsidmap", "release": "0.fc36", "source": "rpm", "version": "2.6.2" } ], "libnftnl": [ { "arch": "x86_64", "epoch": null, "name": "libnftnl", "release": "2.fc36", 
"source": "rpm", "version": "1.2.1" } ], "libnghttp2": [ { "arch": "x86_64", "epoch": null, "name": "libnghttp2", "release": "1.fc36", "source": "rpm", "version": "1.51.0" } ], "libnl3": [ { "arch": "x86_64", "epoch": null, "name": "libnl3", "release": "1.fc36", "source": "rpm", "version": "3.7.0" } ], "libnsl2": [ { "arch": "x86_64", "epoch": null, "name": "libnsl2", "release": "3.fc36", "source": "rpm", "version": "2.0.0" } ], "libpath_utils": [ { "arch": "x86_64", "epoch": null, "name": "libpath_utils", "release": "50.fc36", "source": "rpm", "version": "0.2.1" } ], "libpcap": [ { "arch": "x86_64", "epoch": 14, "name": "libpcap", "release": "1.fc36", "source": "rpm", "version": "1.10.4" } ], "libpipeline": [ { "arch": "x86_64", "epoch": null, "name": "libpipeline", "release": "2.fc36", "source": "rpm", "version": "1.5.5" } ], "libpkgconf": [ { "arch": "x86_64", "epoch": null, "name": "libpkgconf", "release": "2.fc36", "source": "rpm", "version": "1.8.0" } ], "libpsl": [ { "arch": "x86_64", "epoch": null, "name": "libpsl", "release": "5.fc36", "source": "rpm", "version": "0.21.1" } ], "libpwquality": [ { "arch": "x86_64", "epoch": null, "name": "libpwquality", "release": "7.fc36", "source": "rpm", "version": "1.4.4" } ], "libqmi": [ { "arch": "x86_64", "epoch": null, "name": "libqmi", "release": "1.fc36", "source": "rpm", "version": "1.30.6" } ], "libqrtr-glib": [ { "arch": "x86_64", "epoch": null, "name": "libqrtr-glib", "release": "3.fc36", "source": "rpm", "version": "1.0.0" } ], "libref_array": [ { "arch": "x86_64", "epoch": null, "name": "libref_array", "release": "50.fc36", "source": "rpm", "version": "0.1.5" } ], "librepo": [ { "arch": "x86_64", "epoch": null, "name": "librepo", "release": "1.fc36", "source": "rpm", "version": "1.15.1" } ], "libreport-filesystem": [ { "arch": "noarch", "epoch": null, "name": "libreport-filesystem", "release": "1.fc36", "source": "rpm", "version": "2.17.4" } ], "libseccomp": [ { "arch": "x86_64", "epoch": null, "name": "libseccomp", "release": "2.fc36", "source": "rpm", "version": "2.5.3" } ], "libselinux": [ { "arch": "x86_64", "epoch": null, "name": "libselinux", "release": "4.fc36", "source": "rpm", "version": "3.3" } ], "libselinux-utils": [ { "arch": "x86_64", "epoch": null, "name": "libselinux-utils", "release": "4.fc36", "source": "rpm", "version": "3.3" } ], "libsemanage": [ { "arch": "x86_64", "epoch": null, "name": "libsemanage", "release": "3.fc36", "source": "rpm", "version": "3.3" } ], "libsepol": [ { "arch": "x86_64", "epoch": null, "name": "libsepol", "release": "3.fc36", "source": "rpm", "version": "3.3" } ], "libsigsegv": [ { "arch": "x86_64", "epoch": null, "name": "libsigsegv", "release": "2.fc36", "source": "rpm", "version": "2.14" } ], "libsmartcols": [ { "arch": "x86_64", "epoch": null, "name": "libsmartcols", "release": "1.fc36", "source": "rpm", "version": "2.38" } ], "libsmbios": [ { "arch": "x86_64", "epoch": null, "name": "libsmbios", "release": "5.fc36", "source": "rpm", "version": "2.4.3" } ], "libsodium": [ { "arch": "x86_64", "epoch": null, "name": "libsodium", "release": "9.fc36", "source": "rpm", "version": "1.0.18" } ], "libsolv": [ { "arch": "x86_64", "epoch": null, "name": "libsolv", "release": "1.fc36", "source": "rpm", "version": "0.7.22" } ], "libss": [ { "arch": "x86_64", "epoch": null, "name": "libss", "release": "2.fc36", "source": "rpm", "version": "1.46.5" } ], "libssh": [ { "arch": "x86_64", "epoch": null, "name": "libssh", "release": "4.fc36", "source": "rpm", "version": "0.9.6" } ], "libssh-config": [ { 
"arch": "noarch", "epoch": null, "name": "libssh-config", "release": "4.fc36", "source": "rpm", "version": "0.9.6" } ], "libsss_certmap": [ { "arch": "x86_64", "epoch": null, "name": "libsss_certmap", "release": "1.fc36", "source": "rpm", "version": "2.7.4" } ], "libsss_idmap": [ { "arch": "x86_64", "epoch": null, "name": "libsss_idmap", "release": "1.fc36", "source": "rpm", "version": "2.7.4" } ], "libsss_nss_idmap": [ { "arch": "x86_64", "epoch": null, "name": "libsss_nss_idmap", "release": "1.fc36", "source": "rpm", "version": "2.7.4" } ], "libsss_sudo": [ { "arch": "x86_64", "epoch": null, "name": "libsss_sudo", "release": "1.fc36", "source": "rpm", "version": "2.7.4" } ], "libstdc++": [ { "arch": "x86_64", "epoch": null, "name": "libstdc++", "release": "4.fc36", "source": "rpm", "version": "12.2.1" } ], "libtalloc": [ { "arch": "x86_64", "epoch": null, "name": "libtalloc", "release": "1.fc36", "source": "rpm", "version": "2.3.4" } ], "libtasn1": [ { "arch": "x86_64", "epoch": null, "name": "libtasn1", "release": "1.fc36", "source": "rpm", "version": "4.19.0" } ], "libtdb": [ { "arch": "x86_64", "epoch": null, "name": "libtdb", "release": "1.fc36", "source": "rpm", "version": "1.4.7" } ], "libtevent": [ { "arch": "x86_64", "epoch": null, "name": "libtevent", "release": "1.fc36", "source": "rpm", "version": "0.12.1" } ], "libtirpc": [ { "arch": "x86_64", "epoch": null, "name": "libtirpc", "release": "0.fc36", "source": "rpm", "version": "1.3.3" } ], "libtool-ltdl": [ { "arch": "x86_64", "epoch": null, "name": "libtool-ltdl", "release": "1.fc36", "source": "rpm", "version": "2.4.7" } ], "libudisks2": [ { "arch": "x86_64", "epoch": null, "name": "libudisks2", "release": "4.fc36", "source": "rpm", "version": "2.9.4" } ], "libunistring": [ { "arch": "x86_64", "epoch": null, "name": "libunistring", "release": "1.fc36", "source": "rpm", "version": "1.0" } ], "libusb1": [ { "arch": "x86_64", "epoch": null, "name": "libusb1", "release": "8.fc36", "source": "rpm", "version": "1.0.25" } ], "libuser": [ { "arch": "x86_64", "epoch": null, "name": "libuser", "release": "10.fc36", "source": "rpm", "version": "0.63" } ], "libutempter": [ { "arch": "x86_64", "epoch": null, "name": "libutempter", "release": "6.fc36", "source": "rpm", "version": "1.2.1" } ], "libuuid": [ { "arch": "x86_64", "epoch": null, "name": "libuuid", "release": "1.fc36", "source": "rpm", "version": "2.38" } ], "libverto": [ { "arch": "x86_64", "epoch": null, "name": "libverto", "release": "3.fc36", "source": "rpm", "version": "0.3.2" } ], "libverto-libev": [ { "arch": "x86_64", "epoch": null, "name": "libverto-libev", "release": "3.fc36", "source": "rpm", "version": "0.3.2" } ], "libxcrypt": [ { "arch": "x86_64", "epoch": null, "name": "libxcrypt", "release": "4.fc36", "source": "rpm", "version": "4.4.33" } ], "libxcrypt-compat": [ { "arch": "x86_64", "epoch": null, "name": "libxcrypt-compat", "release": "4.fc36", "source": "rpm", "version": "4.4.33" } ], "libxcrypt-devel": [ { "arch": "x86_64", "epoch": null, "name": "libxcrypt-devel", "release": "4.fc36", "source": "rpm", "version": "4.4.33" } ], "libxkbcommon": [ { "arch": "x86_64", "epoch": null, "name": "libxkbcommon", "release": "1.fc36", "source": "rpm", "version": "1.4.0" } ], "libxml2": [ { "arch": "x86_64", "epoch": null, "name": "libxml2", "release": "2.fc36", "source": "rpm", "version": "2.10.3" } ], "libxmlb": [ { "arch": "x86_64", "epoch": null, "name": "libxmlb", "release": "1.fc36", "source": "rpm", "version": "0.3.10" } ], "libxslt": [ { "arch": "x86_64", "epoch": 
null, "name": "libxslt", "release": "1.fc36", "source": "rpm", "version": "1.1.37" } ], "libyaml": [ { "arch": "x86_64", "epoch": null, "name": "libyaml", "release": "7.fc36", "source": "rpm", "version": "0.2.5" } ], "libzstd": [ { "arch": "x86_64", "epoch": null, "name": "libzstd", "release": "1.fc36", "source": "rpm", "version": "1.5.5" } ], "libzstd-devel": [ { "arch": "x86_64", "epoch": null, "name": "libzstd-devel", "release": "1.fc36", "source": "rpm", "version": "1.5.5" } ], "linux-firmware": [ { "arch": "noarch", "epoch": null, "name": "linux-firmware", "release": "148.fc36", "source": "rpm", "version": "20230310" } ], "linux-firmware-whence": [ { "arch": "noarch", "epoch": null, "name": "linux-firmware-whence", "release": "148.fc36", "source": "rpm", "version": "20230310" } ], "lmdb-libs": [ { "arch": "x86_64", "epoch": null, "name": "lmdb-libs", "release": "1.fc36", "source": "rpm", "version": "0.9.30" } ], "lsof": [ { "arch": "x86_64", "epoch": null, "name": "lsof", "release": "3.fc36", "source": "rpm", "version": "4.94.0" } ], "lua-libs": [ { "arch": "x86_64", "epoch": null, "name": "lua-libs", "release": "9.fc36", "source": "rpm", "version": "5.4.4" } ], "lvm2": [ { "arch": "x86_64", "epoch": null, "name": "lvm2", "release": "7.fc36", "source": "rpm", "version": "2.03.11" } ], "lvm2-libs": [ { "arch": "x86_64", "epoch": null, "name": "lvm2-libs", "release": "7.fc36", "source": "rpm", "version": "2.03.11" } ], "lz4-libs": [ { "arch": "x86_64", "epoch": null, "name": "lz4-libs", "release": "4.fc36", "source": "rpm", "version": "1.9.3" } ], "lzo": [ { "arch": "x86_64", "epoch": null, "name": "lzo", "release": "6.fc36", "source": "rpm", "version": "2.10" } ], "m4": [ { "arch": "x86_64", "epoch": null, "name": "m4", "release": "3.fc36", "source": "rpm", "version": "1.4.19" } ], "make": [ { "arch": "x86_64", "epoch": 1, "name": "make", "release": "7.fc36", "source": "rpm", "version": "4.3" } ], "man-db": [ { "arch": "x86_64", "epoch": null, "name": "man-db", "release": "3.fc36", "source": "rpm", "version": "2.10.0" } ], "mdadm": [ { "arch": "x86_64", "epoch": null, "name": "mdadm", "release": "1.fc36", "source": "rpm", "version": "4.2" } ], "mkpasswd": [ { "arch": "x86_64", "epoch": null, "name": "mkpasswd", "release": "1.fc36", "source": "rpm", "version": "5.5.15" } ], "mokutil": [ { "arch": "x86_64", "epoch": 2, "name": "mokutil", "release": "3.fc36", "source": "rpm", "version": "0.6.0" } ], "mozjs91": [ { "arch": "x86_64", "epoch": null, "name": "mozjs91", "release": "1.fc36", "source": "rpm", "version": "91.13.0" } ], "mpdecimal": [ { "arch": "x86_64", "epoch": null, "name": "mpdecimal", "release": "3.fc36", "source": "rpm", "version": "2.5.1" } ], "mpfr": [ { "arch": "x86_64", "epoch": null, "name": "mpfr", "release": "9.fc36", "source": "rpm", "version": "4.1.0" } ], "ncurses": [ { "arch": "x86_64", "epoch": null, "name": "ncurses", "release": "9.20210508.fc36", "source": "rpm", "version": "6.2" } ], "ncurses-base": [ { "arch": "noarch", "epoch": null, "name": "ncurses-base", "release": "9.20210508.fc36", "source": "rpm", "version": "6.2" } ], "ncurses-libs": [ { "arch": "x86_64", "epoch": null, "name": "ncurses-libs", "release": "9.20210508.fc36", "source": "rpm", "version": "6.2" } ], "ndctl": [ { "arch": "x86_64", "epoch": null, "name": "ndctl", "release": "1.fc36", "source": "rpm", "version": "76.1" } ], "ndctl-libs": [ { "arch": "x86_64", "epoch": null, "name": "ndctl-libs", "release": "1.fc36", "source": "rpm", "version": "76.1" } ], "net-tools": [ { "arch": "x86_64", 
"epoch": null, "name": "net-tools", "release": "0.62.20160912git.fc36", "source": "rpm", "version": "2.0" } ], "nettle": [ { "arch": "x86_64", "epoch": null, "name": "nettle", "release": "1.fc36", "source": "rpm", "version": "3.8" } ], "nfs-utils": [ { "arch": "x86_64", "epoch": 1, "name": "nfs-utils", "release": "0.fc36", "source": "rpm", "version": "2.6.2" } ], "nftables": [ { "arch": "x86_64", "epoch": 1, "name": "nftables", "release": "3.fc36", "source": "rpm", "version": "1.0.1" } ], "npth": [ { "arch": "x86_64", "epoch": null, "name": "npth", "release": "8.fc36", "source": "rpm", "version": "1.6" } ], "nspr": [ { "arch": "x86_64", "epoch": null, "name": "nspr", "release": "5.fc36", "source": "rpm", "version": "4.35.0" } ], "nss": [ { "arch": "x86_64", "epoch": null, "name": "nss", "release": "1.fc36", "source": "rpm", "version": "3.89.0" } ], "nss-softokn": [ { "arch": "x86_64", "epoch": null, "name": "nss-softokn", "release": "1.fc36", "source": "rpm", "version": "3.89.0" } ], "nss-softokn-freebl": [ { "arch": "x86_64", "epoch": null, "name": "nss-softokn-freebl", "release": "1.fc36", "source": "rpm", "version": "3.89.0" } ], "nss-sysinit": [ { "arch": "x86_64", "epoch": null, "name": "nss-sysinit", "release": "1.fc36", "source": "rpm", "version": "3.89.0" } ], "nss-util": [ { "arch": "x86_64", "epoch": null, "name": "nss-util", "release": "1.fc36", "source": "rpm", "version": "3.89.0" } ], "ntfs-3g": [ { "arch": "x86_64", "epoch": 2, "name": "ntfs-3g", "release": "1.fc36", "source": "rpm", "version": "2022.10.3" } ], "ntfs-3g-libs": [ { "arch": "x86_64", "epoch": 2, "name": "ntfs-3g-libs", "release": "1.fc36", "source": "rpm", "version": "2022.10.3" } ], "ntfs-3g-system-compression": [ { "arch": "x86_64", "epoch": null, "name": "ntfs-3g-system-compression", "release": "9.fc36", "source": "rpm", "version": "1.0" } ], "ntfsprogs": [ { "arch": "x86_64", "epoch": 2, "name": "ntfsprogs", "release": "1.fc36", "source": "rpm", "version": "2022.10.3" } ], "nvidia-gpu-firmware": [ { "arch": "noarch", "epoch": null, "name": "nvidia-gpu-firmware", "release": "148.fc36", "source": "rpm", "version": "20230310" } ], "openldap": [ { "arch": "x86_64", "epoch": null, "name": "openldap", "release": "1.fc36", "source": "rpm", "version": "2.6.4" } ], "openssh": [ { "arch": "x86_64", "epoch": null, "name": "openssh", "release": "1.fc36.1", "source": "rpm", "version": "8.8p1" } ], "openssh-clients": [ { "arch": "x86_64", "epoch": null, "name": "openssh-clients", "release": "1.fc36.1", "source": "rpm", "version": "8.8p1" } ], "openssh-server": [ { "arch": "x86_64", "epoch": null, "name": "openssh-server", "release": "1.fc36.1", "source": "rpm", "version": "8.8p1" } ], "openssl": [ { "arch": "x86_64", "epoch": 1, "name": "openssl", "release": "1.fc36", "source": "rpm", "version": "3.0.8" } ], "openssl-devel": [ { "arch": "x86_64", "epoch": 1, "name": "openssl-devel", "release": "1.fc36", "source": "rpm", "version": "3.0.8" } ], "openssl-libs": [ { "arch": "x86_64", "epoch": 1, "name": "openssl-libs", "release": "1.fc36", "source": "rpm", "version": "3.0.8" } ], "openssl-pkcs11": [ { "arch": "x86_64", "epoch": null, "name": "openssl-pkcs11", "release": "2.fc36", "source": "rpm", "version": "0.4.12" } ], "os-prober": [ { "arch": "x86_64", "epoch": null, "name": "os-prober", "release": "9.fc36", "source": "rpm", "version": "1.77" } ], "p11-kit": [ { "arch": "x86_64", "epoch": null, "name": "p11-kit", "release": "2.fc36", "source": "rpm", "version": "0.24.1" } ], "p11-kit-trust": [ { "arch": "x86_64", 
"epoch": null, "name": "p11-kit-trust", "release": "2.fc36", "source": "rpm", "version": "0.24.1" } ], "pam": [ { "arch": "x86_64", "epoch": null, "name": "pam", "release": "13.fc36", "source": "rpm", "version": "1.5.2" } ], "pam-libs": [ { "arch": "x86_64", "epoch": null, "name": "pam-libs", "release": "13.fc36", "source": "rpm", "version": "1.5.2" } ], "parted": [ { "arch": "x86_64", "epoch": null, "name": "parted", "release": "13.fc36", "source": "rpm", "version": "3.4" } ], "passwd": [ { "arch": "x86_64", "epoch": null, "name": "passwd", "release": "12.fc36", "source": "rpm", "version": "0.80" } ], "pcre": [ { "arch": "x86_64", "epoch": null, "name": "pcre", "release": "1.fc36.1", "source": "rpm", "version": "8.45" } ], "pcre2": [ { "arch": "x86_64", "epoch": null, "name": "pcre2", "release": "1.fc36", "source": "rpm", "version": "10.40" } ], "pcre2-syntax": [ { "arch": "noarch", "epoch": null, "name": "pcre2-syntax", "release": "1.fc36", "source": "rpm", "version": "10.40" } ], "pcsc-lite": [ { "arch": "x86_64", "epoch": null, "name": "pcsc-lite", "release": "1.fc36", "source": "rpm", "version": "1.9.8" } ], "pcsc-lite-ccid": [ { "arch": "x86_64", "epoch": null, "name": "pcsc-lite-ccid", "release": "1.fc36", "source": "rpm", "version": "1.5.0" } ], "pcsc-lite-libs": [ { "arch": "x86_64", "epoch": null, "name": "pcsc-lite-libs", "release": "1.fc36", "source": "rpm", "version": "1.9.8" } ], "perl-AutoLoader": [ { "arch": "noarch", "epoch": 0, "name": "perl-AutoLoader", "release": "486.fc36", "source": "rpm", "version": "5.74" } ], "perl-B": [ { "arch": "x86_64", "epoch": 0, "name": "perl-B", "release": "486.fc36", "source": "rpm", "version": "1.82" } ], "perl-Carp": [ { "arch": "noarch", "epoch": null, "name": "perl-Carp", "release": "479.fc36", "source": "rpm", "version": "1.52" } ], "perl-Class-Struct": [ { "arch": "noarch", "epoch": 0, "name": "perl-Class-Struct", "release": "486.fc36", "source": "rpm", "version": "0.66" } ], "perl-Data-Dumper": [ { "arch": "x86_64", "epoch": null, "name": "perl-Data-Dumper", "release": "3.fc36", "source": "rpm", "version": "2.183" } ], "perl-Digest": [ { "arch": "noarch", "epoch": null, "name": "perl-Digest", "release": "2.fc36", "source": "rpm", "version": "1.20" } ], "perl-Digest-MD5": [ { "arch": "x86_64", "epoch": null, "name": "perl-Digest-MD5", "release": "479.fc36", "source": "rpm", "version": "2.58" } ], "perl-DynaLoader": [ { "arch": "x86_64", "epoch": 0, "name": "perl-DynaLoader", "release": "486.fc36", "source": "rpm", "version": "1.50" } ], "perl-Encode": [ { "arch": "x86_64", "epoch": 4, "name": "perl-Encode", "release": "485.fc36", "source": "rpm", "version": "3.17" } ], "perl-Errno": [ { "arch": "x86_64", "epoch": 0, "name": "perl-Errno", "release": "486.fc36", "source": "rpm", "version": "1.33" } ], "perl-Error": [ { "arch": "noarch", "epoch": 1, "name": "perl-Error", "release": "8.fc36", "source": "rpm", "version": "0.17029" } ], "perl-Exporter": [ { "arch": "noarch", "epoch": null, "name": "perl-Exporter", "release": "480.fc36", "source": "rpm", "version": "5.76" } ], "perl-Fcntl": [ { "arch": "x86_64", "epoch": 0, "name": "perl-Fcntl", "release": "486.fc36", "source": "rpm", "version": "1.14" } ], "perl-File-Basename": [ { "arch": "noarch", "epoch": 0, "name": "perl-File-Basename", "release": "486.fc36", "source": "rpm", "version": "2.85" } ], "perl-File-Find": [ { "arch": "noarch", "epoch": 0, "name": "perl-File-Find", "release": "486.fc36", "source": "rpm", "version": "1.39" } ], "perl-File-Path": [ { "arch": "noarch", "epoch": 
null, "name": "perl-File-Path", "release": "479.fc36", "source": "rpm", "version": "2.18" } ], "perl-File-Temp": [ { "arch": "noarch", "epoch": 1, "name": "perl-File-Temp", "release": "479.fc36", "source": "rpm", "version": "0.231.100" } ], "perl-File-stat": [ { "arch": "noarch", "epoch": 0, "name": "perl-File-stat", "release": "486.fc36", "source": "rpm", "version": "1.09" } ], "perl-FileHandle": [ { "arch": "noarch", "epoch": 0, "name": "perl-FileHandle", "release": "486.fc36", "source": "rpm", "version": "2.03" } ], "perl-Getopt-Long": [ { "arch": "noarch", "epoch": 1, "name": "perl-Getopt-Long", "release": "1.fc36", "source": "rpm", "version": "2.54" } ], "perl-Getopt-Std": [ { "arch": "noarch", "epoch": 0, "name": "perl-Getopt-Std", "release": "486.fc36", "source": "rpm", "version": "1.13" } ], "perl-Git": [ { "arch": "noarch", "epoch": null, "name": "perl-Git", "release": "1.fc36", "source": "rpm", "version": "2.40.1" } ], "perl-HTTP-Tiny": [ { "arch": "noarch", "epoch": null, "name": "perl-HTTP-Tiny", "release": "1.fc36", "source": "rpm", "version": "0.082" } ], "perl-IO": [ { "arch": "x86_64", "epoch": 0, "name": "perl-IO", "release": "486.fc36", "source": "rpm", "version": "1.46" } ], "perl-IO-Socket-IP": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-Socket-IP", "release": "480.fc36", "source": "rpm", "version": "0.41" } ], "perl-IO-Socket-SSL": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-Socket-SSL", "release": "2.fc36", "source": "rpm", "version": "2.074" } ], "perl-IPC-Open3": [ { "arch": "noarch", "epoch": 0, "name": "perl-IPC-Open3", "release": "486.fc36", "source": "rpm", "version": "1.21" } ], "perl-MIME-Base64": [ { "arch": "x86_64", "epoch": null, "name": "perl-MIME-Base64", "release": "479.fc36", "source": "rpm", "version": "3.16" } ], "perl-Mozilla-CA": [ { "arch": "noarch", "epoch": null, "name": "perl-Mozilla-CA", "release": "2.fc36", "source": "rpm", "version": "20211001" } ], "perl-NDBM_File": [ { "arch": "x86_64", "epoch": 0, "name": "perl-NDBM_File", "release": "486.fc36", "source": "rpm", "version": "1.15" } ], "perl-Net-SSLeay": [ { "arch": "x86_64", "epoch": null, "name": "perl-Net-SSLeay", "release": "2.fc36", "source": "rpm", "version": "1.92" } ], "perl-POSIX": [ { "arch": "x86_64", "epoch": 0, "name": "perl-POSIX", "release": "486.fc36", "source": "rpm", "version": "1.97" } ], "perl-PathTools": [ { "arch": "x86_64", "epoch": null, "name": "perl-PathTools", "release": "479.fc36", "source": "rpm", "version": "3.80" } ], "perl-Pod-Escapes": [ { "arch": "noarch", "epoch": 1, "name": "perl-Pod-Escapes", "release": "479.fc36", "source": "rpm", "version": "1.07" } ], "perl-Pod-Perldoc": [ { "arch": "noarch", "epoch": null, "name": "perl-Pod-Perldoc", "release": "480.fc36", "source": "rpm", "version": "3.28.01" } ], "perl-Pod-Simple": [ { "arch": "noarch", "epoch": 1, "name": "perl-Pod-Simple", "release": "3.fc36", "source": "rpm", "version": "3.43" } ], "perl-Pod-Usage": [ { "arch": "noarch", "epoch": 4, "name": "perl-Pod-Usage", "release": "479.fc36", "source": "rpm", "version": "2.01" } ], "perl-Scalar-List-Utils": [ { "arch": "x86_64", "epoch": 5, "name": "perl-Scalar-List-Utils", "release": "489.fc36", "source": "rpm", "version": "1.63" } ], "perl-SelectSaver": [ { "arch": "noarch", "epoch": 0, "name": "perl-SelectSaver", "release": "486.fc36", "source": "rpm", "version": "1.02" } ], "perl-Socket": [ { "arch": "x86_64", "epoch": 4, "name": "perl-Socket", "release": "1.fc36", "source": "rpm", "version": "2.036" } ], "perl-Storable": [ { 
"arch": "x86_64", "epoch": 1, "name": "perl-Storable", "release": "2.fc36", "source": "rpm", "version": "3.25" } ], "perl-Symbol": [ { "arch": "noarch", "epoch": 0, "name": "perl-Symbol", "release": "486.fc36", "source": "rpm", "version": "1.09" } ], "perl-Term-ANSIColor": [ { "arch": "noarch", "epoch": null, "name": "perl-Term-ANSIColor", "release": "480.fc36", "source": "rpm", "version": "5.01" } ], "perl-Term-Cap": [ { "arch": "noarch", "epoch": null, "name": "perl-Term-Cap", "release": "479.fc36", "source": "rpm", "version": "1.17" } ], "perl-TermReadKey": [ { "arch": "x86_64", "epoch": null, "name": "perl-TermReadKey", "release": "12.fc36", "source": "rpm", "version": "2.38" } ], "perl-Text-ParseWords": [ { "arch": "noarch", "epoch": null, "name": "perl-Text-ParseWords", "release": "1.fc36", "source": "rpm", "version": "3.31" } ], "perl-Text-Tabs+Wrap": [ { "arch": "noarch", "epoch": null, "name": "perl-Text-Tabs+Wrap", "release": "2.fc36", "source": "rpm", "version": "2021.0814" } ], "perl-Time-Local": [ { "arch": "noarch", "epoch": 2, "name": "perl-Time-Local", "release": "479.fc36", "source": "rpm", "version": "1.300" } ], "perl-URI": [ { "arch": "noarch", "epoch": null, "name": "perl-URI", "release": "1.fc36", "source": "rpm", "version": "5.10" } ], "perl-base": [ { "arch": "noarch", "epoch": 0, "name": "perl-base", "release": "486.fc36", "source": "rpm", "version": "2.27" } ], "perl-constant": [ { "arch": "noarch", "epoch": null, "name": "perl-constant", "release": "480.fc36", "source": "rpm", "version": "1.33" } ], "perl-if": [ { "arch": "noarch", "epoch": 0, "name": "perl-if", "release": "486.fc36", "source": "rpm", "version": "0.60.900" } ], "perl-interpreter": [ { "arch": "x86_64", "epoch": 4, "name": "perl-interpreter", "release": "486.fc36", "source": "rpm", "version": "5.34.1" } ], "perl-lib": [ { "arch": "x86_64", "epoch": 0, "name": "perl-lib", "release": "486.fc36", "source": "rpm", "version": "0.65" } ], "perl-libnet": [ { "arch": "noarch", "epoch": null, "name": "perl-libnet", "release": "480.fc36", "source": "rpm", "version": "3.13" } ], "perl-libs": [ { "arch": "x86_64", "epoch": 4, "name": "perl-libs", "release": "486.fc36", "source": "rpm", "version": "5.34.1" } ], "perl-mro": [ { "arch": "x86_64", "epoch": 0, "name": "perl-mro", "release": "486.fc36", "source": "rpm", "version": "1.25" } ], "perl-overload": [ { "arch": "noarch", "epoch": 0, "name": "perl-overload", "release": "486.fc36", "source": "rpm", "version": "1.33" } ], "perl-overloading": [ { "arch": "noarch", "epoch": 0, "name": "perl-overloading", "release": "486.fc36", "source": "rpm", "version": "0.02" } ], "perl-parent": [ { "arch": "noarch", "epoch": 1, "name": "perl-parent", "release": "479.fc36", "source": "rpm", "version": "0.238" } ], "perl-podlators": [ { "arch": "noarch", "epoch": 1, "name": "perl-podlators", "release": "479.fc36", "source": "rpm", "version": "4.14" } ], "perl-subs": [ { "arch": "noarch", "epoch": 0, "name": "perl-subs", "release": "486.fc36", "source": "rpm", "version": "1.04" } ], "perl-vars": [ { "arch": "noarch", "epoch": 0, "name": "perl-vars", "release": "486.fc36", "source": "rpm", "version": "1.05" } ], "pigz": [ { "arch": "x86_64", "epoch": null, "name": "pigz", "release": "1.fc36", "source": "rpm", "version": "2.7" } ], "pkgconf": [ { "arch": "x86_64", "epoch": null, "name": "pkgconf", "release": "2.fc36", "source": "rpm", "version": "1.8.0" } ], "pkgconf-m4": [ { "arch": "noarch", "epoch": null, "name": "pkgconf-m4", "release": "2.fc36", "source": "rpm", "version": 
"1.8.0" } ], "pkgconf-pkg-config": [ { "arch": "x86_64", "epoch": null, "name": "pkgconf-pkg-config", "release": "2.fc36", "source": "rpm", "version": "1.8.0" } ], "plymouth": [ { "arch": "x86_64", "epoch": null, "name": "plymouth", "release": "1.fc36", "source": "rpm", "version": "22.02.122" } ], "plymouth-core-libs": [ { "arch": "x86_64", "epoch": null, "name": "plymouth-core-libs", "release": "1.fc36", "source": "rpm", "version": "22.02.122" } ], "plymouth-scripts": [ { "arch": "x86_64", "epoch": null, "name": "plymouth-scripts", "release": "1.fc36", "source": "rpm", "version": "22.02.122" } ], "policycoreutils": [ { "arch": "x86_64", "epoch": null, "name": "policycoreutils", "release": "4.fc36", "source": "rpm", "version": "3.3" } ], "polkit": [ { "arch": "x86_64", "epoch": null, "name": "polkit", "release": "5.fc36", "source": "rpm", "version": "0.120" } ], "polkit-libs": [ { "arch": "x86_64", "epoch": null, "name": "polkit-libs", "release": "5.fc36", "source": "rpm", "version": "0.120" } ], "polkit-pkla-compat": [ { "arch": "x86_64", "epoch": null, "name": "polkit-pkla-compat", "release": "21.fc36", "source": "rpm", "version": "0.1" } ], "popt": [ { "arch": "x86_64", "epoch": null, "name": "popt", "release": "7.fc36", "source": "rpm", "version": "1.18" } ], "procps-ng": [ { "arch": "x86_64", "epoch": null, "name": "procps-ng", "release": "4.fc36.1", "source": "rpm", "version": "3.3.17" } ], "protobuf-c": [ { "arch": "x86_64", "epoch": null, "name": "protobuf-c", "release": "2.fc36", "source": "rpm", "version": "1.4.1" } ], "psmisc": [ { "arch": "x86_64", "epoch": null, "name": "psmisc", "release": "3.fc36", "source": "rpm", "version": "23.4" } ], "publicsuffix-list-dafsa": [ { "arch": "noarch", "epoch": null, "name": "publicsuffix-list-dafsa", "release": "1.fc36", "source": "rpm", "version": "20230318" } ], "python-pip-wheel": [ { "arch": "noarch", "epoch": null, "name": "python-pip-wheel", "release": "4.fc36", "source": "rpm", "version": "21.3.1" } ], "python-setuptools-wheel": [ { "arch": "noarch", "epoch": null, "name": "python-setuptools-wheel", "release": "4.fc36", "source": "rpm", "version": "59.6.0" } ], "python-unversioned-command": [ { "arch": "noarch", "epoch": null, "name": "python-unversioned-command", "release": "1.fc36", "source": "rpm", "version": "3.10.11" } ], "python3": [ { "arch": "x86_64", "epoch": null, "name": "python3", "release": "1.fc36", "source": "rpm", "version": "3.10.11" } ], "python3-attrs": [ { "arch": "noarch", "epoch": null, "name": "python3-attrs", "release": "2.fc36", "source": "rpm", "version": "21.4.0" } ], "python3-audit": [ { "arch": "x86_64", "epoch": null, "name": "python3-audit", "release": "1.fc36", "source": "rpm", "version": "3.1.1" } ], "python3-blivet": [ { "arch": "noarch", "epoch": 1, "name": "python3-blivet", "release": "1.fc36", "source": "rpm", "version": "3.4.4" } ], "python3-blockdev": [ { "arch": "x86_64", "epoch": null, "name": "python3-blockdev", "release": "2.fc36", "source": "rpm", "version": "2.28" } ], "python3-bytesize": [ { "arch": "x86_64", "epoch": null, "name": "python3-bytesize", "release": "1.fc36", "source": "rpm", "version": "2.7" } ], "python3-cffi": [ { "arch": "x86_64", "epoch": null, "name": "python3-cffi", "release": "5.fc36", "source": "rpm", "version": "1.15.0" } ], "python3-charset-normalizer": [ { "arch": "noarch", "epoch": null, "name": "python3-charset-normalizer", "release": "1.fc36", "source": "rpm", "version": "2.0.11" } ], "python3-configobj": [ { "arch": "noarch", "epoch": null, "name": 
"python3-configobj", "release": "27.fc36", "source": "rpm", "version": "5.0.6" } ], "python3-configshell": [ { "arch": "noarch", "epoch": 1, "name": "python3-configshell", "release": "4.fc36", "source": "rpm", "version": "1.1.29" } ], "python3-cryptography": [ { "arch": "x86_64", "epoch": null, "name": "python3-cryptography", "release": "4.fc36", "source": "rpm", "version": "36.0.0" } ], "python3-dateutil": [ { "arch": "noarch", "epoch": 1, "name": "python3-dateutil", "release": "8.fc36", "source": "rpm", "version": "2.8.1" } ], "python3-dbus": [ { "arch": "x86_64", "epoch": null, "name": "python3-dbus", "release": "3.fc36", "source": "rpm", "version": "1.2.18" } ], "python3-distro": [ { "arch": "noarch", "epoch": null, "name": "python3-distro", "release": "2.fc36", "source": "rpm", "version": "1.6.0" } ], "python3-dnf": [ { "arch": "noarch", "epoch": null, "name": "python3-dnf", "release": "1.fc36", "source": "rpm", "version": "4.15.0" } ], "python3-dnf-plugins-core": [ { "arch": "noarch", "epoch": null, "name": "python3-dnf-plugins-core", "release": "1.fc36", "source": "rpm", "version": "4.4.0" } ], "python3-firewall": [ { "arch": "noarch", "epoch": null, "name": "python3-firewall", "release": "1.fc36", "source": "rpm", "version": "1.2.5" } ], "python3-gobject-base": [ { "arch": "x86_64", "epoch": null, "name": "python3-gobject-base", "release": "1.fc36", "source": "rpm", "version": "3.42.1" } ], "python3-gobject-base-noarch": [ { "arch": "noarch", "epoch": null, "name": "python3-gobject-base-noarch", "release": "1.fc36", "source": "rpm", "version": "3.42.1" } ], "python3-gpg": [ { "arch": "x86_64", "epoch": null, "name": "python3-gpg", "release": "4.fc36", "source": "rpm", "version": "1.17.0" } ], "python3-hawkey": [ { "arch": "x86_64", "epoch": null, "name": "python3-hawkey", "release": "1.fc36", "source": "rpm", "version": "0.70.0" } ], "python3-idna": [ { "arch": "noarch", "epoch": null, "name": "python3-idna", "release": "2.fc36", "source": "rpm", "version": "3.3" } ], "python3-jinja2": [ { "arch": "noarch", "epoch": null, "name": "python3-jinja2", "release": "2.fc36", "source": "rpm", "version": "3.0.3" } ], "python3-jsonpatch": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonpatch", "release": "19.fc36", "source": "rpm", "version": "1.21" } ], "python3-jsonpointer": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonpointer", "release": "5.fc36", "source": "rpm", "version": "2.0" } ], "python3-jsonschema": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonschema", "release": "13.fc36", "source": "rpm", "version": "3.2.0" } ], "python3-jwt": [ { "arch": "noarch", "epoch": null, "name": "python3-jwt", "release": "1.fc36", "source": "rpm", "version": "2.4.0" } ], "python3-jwt+crypto": [ { "arch": "noarch", "epoch": null, "name": "python3-jwt+crypto", "release": "1.fc36", "source": "rpm", "version": "2.4.0" } ], "python3-kmod": [ { "arch": "x86_64", "epoch": null, "name": "python3-kmod", "release": "33.fc36", "source": "rpm", "version": "0.9" } ], "python3-libcomps": [ { "arch": "x86_64", "epoch": null, "name": "python3-libcomps", "release": "2.fc36", "source": "rpm", "version": "0.1.18" } ], "python3-libdnf": [ { "arch": "x86_64", "epoch": null, "name": "python3-libdnf", "release": "1.fc36", "source": "rpm", "version": "0.70.0" } ], "python3-libs": [ { "arch": "x86_64", "epoch": null, "name": "python3-libs", "release": "1.fc36", "source": "rpm", "version": "3.10.11" } ], "python3-libselinux": [ { "arch": "x86_64", "epoch": null, "name": "python3-libselinux", 
"release": "4.fc36", "source": "rpm", "version": "3.3" } ], "python3-libsemanage": [ { "arch": "x86_64", "epoch": null, "name": "python3-libsemanage", "release": "3.fc36", "source": "rpm", "version": "3.3" } ], "python3-lxml": [ { "arch": "x86_64", "epoch": null, "name": "python3-lxml", "release": "3.fc36", "source": "rpm", "version": "4.7.1" } ], "python3-markupsafe": [ { "arch": "x86_64", "epoch": null, "name": "python3-markupsafe", "release": "1.fc36", "source": "rpm", "version": "2.1.1" } ], "python3-netifaces": [ { "arch": "x86_64", "epoch": null, "name": "python3-netifaces", "release": "2.fc36", "source": "rpm", "version": "0.11.0" } ], "python3-nftables": [ { "arch": "x86_64", "epoch": 1, "name": "python3-nftables", "release": "3.fc36", "source": "rpm", "version": "1.0.1" } ], "python3-oauthlib": [ { "arch": "noarch", "epoch": null, "name": "python3-oauthlib", "release": "12.fc36", "source": "rpm", "version": "3.0.2" } ], "python3-oauthlib+signedtoken": [ { "arch": "noarch", "epoch": null, "name": "python3-oauthlib+signedtoken", "release": "12.fc36", "source": "rpm", "version": "3.0.2" } ], "python3-ply": [ { "arch": "noarch", "epoch": null, "name": "python3-ply", "release": "15.fc36", "source": "rpm", "version": "3.11" } ], "python3-policycoreutils": [ { "arch": "noarch", "epoch": null, "name": "python3-policycoreutils", "release": "4.fc36", "source": "rpm", "version": "3.3" } ], "python3-prettytable": [ { "arch": "noarch", "epoch": null, "name": "python3-prettytable", "release": "28.fc36", "source": "rpm", "version": "0.7.2" } ], "python3-pycparser": [ { "arch": "noarch", "epoch": null, "name": "python3-pycparser", "release": "6.fc36", "source": "rpm", "version": "2.20" } ], "python3-pyparsing": [ { "arch": "noarch", "epoch": null, "name": "python3-pyparsing", "release": "10.fc36", "source": "rpm", "version": "2.4.7" } ], "python3-pyparted": [ { "arch": "x86_64", "epoch": 1, "name": "python3-pyparted", "release": "1.fc36", "source": "rpm", "version": "3.12.0" } ], "python3-pyrsistent": [ { "arch": "x86_64", "epoch": null, "name": "python3-pyrsistent", "release": "2.fc36", "source": "rpm", "version": "0.18.1" } ], "python3-pyserial": [ { "arch": "noarch", "epoch": null, "name": "python3-pyserial", "release": "13.fc36", "source": "rpm", "version": "3.4" } ], "python3-pysocks": [ { "arch": "noarch", "epoch": null, "name": "python3-pysocks", "release": "12.fc36", "source": "rpm", "version": "1.7.1" } ], "python3-pyudev": [ { "arch": "noarch", "epoch": null, "name": "python3-pyudev", "release": "8.fc36", "source": "rpm", "version": "0.22.0" } ], "python3-pyyaml": [ { "arch": "x86_64", "epoch": null, "name": "python3-pyyaml", "release": "3.fc36", "source": "rpm", "version": "6.0" } ], "python3-requests": [ { "arch": "noarch", "epoch": null, "name": "python3-requests", "release": "2.fc36", "source": "rpm", "version": "2.27.1" } ], "python3-rpm": [ { "arch": "x86_64", "epoch": null, "name": "python3-rpm", "release": "3.fc36", "source": "rpm", "version": "4.17.1" } ], "python3-rtslib": [ { "arch": "noarch", "epoch": null, "name": "python3-rtslib", "release": "1.fc36", "source": "rpm", "version": "2.1.75" } ], "python3-setools": [ { "arch": "x86_64", "epoch": null, "name": "python3-setools", "release": "5.fc36", "source": "rpm", "version": "4.4.0" } ], "python3-setuptools": [ { "arch": "noarch", "epoch": null, "name": "python3-setuptools", "release": "4.fc36", "source": "rpm", "version": "59.6.0" } ], "python3-six": [ { "arch": "noarch", "epoch": null, "name": "python3-six", "release": 
"5.fc36", "source": "rpm", "version": "1.16.0" } ], "python3-systemd": [ { "arch": "x86_64", "epoch": null, "name": "python3-systemd", "release": "20.fc36", "source": "rpm", "version": "234" } ], "python3-unbound": [ { "arch": "x86_64", "epoch": null, "name": "python3-unbound", "release": "1.fc36", "source": "rpm", "version": "1.17.1" } ], "python3-urllib3": [ { "arch": "noarch", "epoch": null, "name": "python3-urllib3", "release": "1.fc36", "source": "rpm", "version": "1.26.12" } ], "python3-urwid": [ { "arch": "x86_64", "epoch": null, "name": "python3-urwid", "release": "5.fc36", "source": "rpm", "version": "2.1.2" } ], "qa-tools": [ { "arch": "noarch", "epoch": null, "name": "qa-tools", "release": "4.fc36", "source": "rpm", "version": "4.1" } ], "quota": [ { "arch": "x86_64", "epoch": 1, "name": "quota", "release": "7.fc36", "source": "rpm", "version": "4.06" } ], "quota-nls": [ { "arch": "noarch", "epoch": 1, "name": "quota-nls", "release": "7.fc36", "source": "rpm", "version": "4.06" } ], "readline": [ { "arch": "x86_64", "epoch": null, "name": "readline", "release": "2.fc36", "source": "rpm", "version": "8.2" } ], "restraint": [ { "arch": "x86_64", "epoch": null, "name": "restraint", "release": "1.fc36eng", "source": "rpm", "version": "0.4.4" } ], "restraint-rhts": [ { "arch": "x86_64", "epoch": null, "name": "restraint-rhts", "release": "1.fc36eng", "source": "rpm", "version": "0.4.4" } ], "rng-tools": [ { "arch": "x86_64", "epoch": null, "name": "rng-tools", "release": "1.fc36", "source": "rpm", "version": "6.16" } ], "rootfiles": [ { "arch": "noarch", "epoch": null, "name": "rootfiles", "release": "31.fc36", "source": "rpm", "version": "8.1" } ], "rpcbind": [ { "arch": "x86_64", "epoch": null, "name": "rpcbind", "release": "2.fc36", "source": "rpm", "version": "1.2.6" } ], "rpm": [ { "arch": "x86_64", "epoch": null, "name": "rpm", "release": "3.fc36", "source": "rpm", "version": "4.17.1" } ], "rpm-build-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-build-libs", "release": "3.fc36", "source": "rpm", "version": "4.17.1" } ], "rpm-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-libs", "release": "3.fc36", "source": "rpm", "version": "4.17.1" } ], "rpm-plugin-selinux": [ { "arch": "x86_64", "epoch": null, "name": "rpm-plugin-selinux", "release": "3.fc36", "source": "rpm", "version": "4.17.1" } ], "rpm-plugin-systemd-inhibit": [ { "arch": "x86_64", "epoch": null, "name": "rpm-plugin-systemd-inhibit", "release": "3.fc36", "source": "rpm", "version": "4.17.1" } ], "rpm-sign-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-sign-libs", "release": "3.fc36", "source": "rpm", "version": "4.17.1" } ], "rsync": [ { "arch": "x86_64", "epoch": null, "name": "rsync", "release": "1.fc36", "source": "rpm", "version": "3.2.7" } ], "rtl-sdr": [ { "arch": "x86_64", "epoch": null, "name": "rtl-sdr", "release": "11.fc36", "source": "rpm", "version": "0.6.0" } ], "sed": [ { "arch": "x86_64", "epoch": null, "name": "sed", "release": "10.fc36", "source": "rpm", "version": "4.8" } ], "selinux-policy": [ { "arch": "noarch", "epoch": null, "name": "selinux-policy", "release": "1.fc36", "source": "rpm", "version": "36.18" } ], "selinux-policy-targeted": [ { "arch": "noarch", "epoch": null, "name": "selinux-policy-targeted", "release": "1.fc36", "source": "rpm", "version": "36.18" } ], "setup": [ { "arch": "noarch", "epoch": null, "name": "setup", "release": "1.fc36", "source": "rpm", "version": "2.14.1" } ], "sgpio": [ { "arch": "x86_64", "epoch": null, "name": "sgpio", "release": 
"30.fc36", "source": "rpm", "version": "1.2.0.10" } ], "shadow-utils": [ { "arch": "x86_64", "epoch": 2, "name": "shadow-utils", "release": "6.fc36", "source": "rpm", "version": "4.11.1" } ], "shared-mime-info": [ { "arch": "x86_64", "epoch": null, "name": "shared-mime-info", "release": "3.fc35", "source": "rpm", "version": "2.1" } ], "sqlite-libs": [ { "arch": "x86_64", "epoch": null, "name": "sqlite-libs", "release": "5.fc36", "source": "rpm", "version": "3.36.0" } ], "sssd-client": [ { "arch": "x86_64", "epoch": null, "name": "sssd-client", "release": "1.fc36", "source": "rpm", "version": "2.7.4" } ], "sssd-common": [ { "arch": "x86_64", "epoch": null, "name": "sssd-common", "release": "1.fc36", "source": "rpm", "version": "2.7.4" } ], "sssd-kcm": [ { "arch": "x86_64", "epoch": null, "name": "sssd-kcm", "release": "1.fc36", "source": "rpm", "version": "2.7.4" } ], "sssd-nfs-idmap": [ { "arch": "x86_64", "epoch": null, "name": "sssd-nfs-idmap", "release": "1.fc36", "source": "rpm", "version": "2.7.4" } ], "strace": [ { "arch": "x86_64", "epoch": null, "name": "strace", "release": "1.fc36", "source": "rpm", "version": "6.2" } ], "sudo": [ { "arch": "x86_64", "epoch": null, "name": "sudo", "release": "1.p2.fc36", "source": "rpm", "version": "1.9.13" } ], "sudo-python-plugin": [ { "arch": "x86_64", "epoch": null, "name": "sudo-python-plugin", "release": "1.p2.fc36", "source": "rpm", "version": "1.9.13" } ], "systemd": [ { "arch": "x86_64", "epoch": null, "name": "systemd", "release": "2.fc36", "source": "rpm", "version": "250.10" } ], "systemd-libs": [ { "arch": "x86_64", "epoch": null, "name": "systemd-libs", "release": "2.fc36", "source": "rpm", "version": "250.10" } ], "systemd-networkd": [ { "arch": "x86_64", "epoch": null, "name": "systemd-networkd", "release": "2.fc36", "source": "rpm", "version": "250.10" } ], "systemd-oomd-defaults": [ { "arch": "noarch", "epoch": null, "name": "systemd-oomd-defaults", "release": "2.fc36", "source": "rpm", "version": "250.10" } ], "systemd-pam": [ { "arch": "x86_64", "epoch": null, "name": "systemd-pam", "release": "2.fc36", "source": "rpm", "version": "250.10" } ], "systemd-resolved": [ { "arch": "x86_64", "epoch": null, "name": "systemd-resolved", "release": "2.fc36", "source": "rpm", "version": "250.10" } ], "systemd-udev": [ { "arch": "x86_64", "epoch": null, "name": "systemd-udev", "release": "2.fc36", "source": "rpm", "version": "250.10" } ], "systemtap": [ { "arch": "x86_64", "epoch": null, "name": "systemtap", "release": "3.fc36", "source": "rpm", "version": "4.8" } ], "systemtap-client": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-client", "release": "3.fc36", "source": "rpm", "version": "4.8" } ], "systemtap-devel": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-devel", "release": "3.fc36", "source": "rpm", "version": "4.8" } ], "systemtap-runtime": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-runtime", "release": "3.fc36", "source": "rpm", "version": "4.8" } ], "tar": [ { "arch": "x86_64", "epoch": 2, "name": "tar", "release": "3.fc36", "source": "rpm", "version": "1.34" } ], "target-restore": [ { "arch": "noarch", "epoch": null, "name": "target-restore", "release": "1.fc36", "source": "rpm", "version": "2.1.75" } ], "targetcli": [ { "arch": "noarch", "epoch": null, "name": "targetcli", "release": "4.fc36", "source": "rpm", "version": "2.1.54" } ], "tbb": [ { "arch": "x86_64", "epoch": null, "name": "tbb", "release": "9.fc35", "source": "rpm", "version": "2020.3" } ], "time": [ { "arch": "x86_64", "epoch": 
null, "name": "time", "release": "18.fc36", "source": "rpm", "version": "1.9" } ], "tpm2-tools": [ { "arch": "x86_64", "epoch": null, "name": "tpm2-tools", "release": "1.fc36", "source": "rpm", "version": "5.4" } ], "tpm2-tss": [ { "arch": "x86_64", "epoch": null, "name": "tpm2-tss", "release": "1.fc36", "source": "rpm", "version": "3.2.2" } ], "tzdata": [ { "arch": "noarch", "epoch": null, "name": "tzdata", "release": "1.fc36", "source": "rpm", "version": "2023c" } ], "udisks2": [ { "arch": "x86_64", "epoch": null, "name": "udisks2", "release": "4.fc36", "source": "rpm", "version": "2.9.4" } ], "unbound-anchor": [ { "arch": "x86_64", "epoch": null, "name": "unbound-anchor", "release": "1.fc36", "source": "rpm", "version": "1.17.1" } ], "unbound-libs": [ { "arch": "x86_64", "epoch": null, "name": "unbound-libs", "release": "1.fc36", "source": "rpm", "version": "1.17.1" } ], "unzip": [ { "arch": "x86_64", "epoch": null, "name": "unzip", "release": "57.fc36", "source": "rpm", "version": "6.0" } ], "userspace-rcu": [ { "arch": "x86_64", "epoch": null, "name": "userspace-rcu", "release": "4.fc36", "source": "rpm", "version": "0.13.0" } ], "util-linux": [ { "arch": "x86_64", "epoch": null, "name": "util-linux", "release": "1.fc36", "source": "rpm", "version": "2.38" } ], "util-linux-core": [ { "arch": "x86_64", "epoch": null, "name": "util-linux-core", "release": "1.fc36", "source": "rpm", "version": "2.38" } ], "vim-common": [ { "arch": "x86_64", "epoch": 2, "name": "vim-common", "release": "1.fc36", "source": "rpm", "version": "9.0.1486" } ], "vim-data": [ { "arch": "noarch", "epoch": 2, "name": "vim-data", "release": "1.fc36", "source": "rpm", "version": "9.0.1486" } ], "vim-default-editor": [ { "arch": "noarch", "epoch": 2, "name": "vim-default-editor", "release": "1.fc36", "source": "rpm", "version": "9.0.1486" } ], "vim-enhanced": [ { "arch": "x86_64", "epoch": 2, "name": "vim-enhanced", "release": "1.fc36", "source": "rpm", "version": "9.0.1486" } ], "vim-filesystem": [ { "arch": "noarch", "epoch": 2, "name": "vim-filesystem", "release": "1.fc36", "source": "rpm", "version": "9.0.1486" } ], "vim-minimal": [ { "arch": "x86_64", "epoch": 2, "name": "vim-minimal", "release": "1.fc36", "source": "rpm", "version": "9.0.1486" } ], "volume_key-libs": [ { "arch": "x86_64", "epoch": null, "name": "volume_key-libs", "release": "15.fc36", "source": "rpm", "version": "0.3.12" } ], "wget": [ { "arch": "x86_64", "epoch": null, "name": "wget", "release": "1.fc36", "source": "rpm", "version": "1.21.3" } ], "which": [ { "arch": "x86_64", "epoch": null, "name": "which", "release": "32.fc36", "source": "rpm", "version": "2.21" } ], "whois-nls": [ { "arch": "noarch", "epoch": null, "name": "whois-nls", "release": "1.fc36", "source": "rpm", "version": "5.5.15" } ], "xfsprogs": [ { "arch": "x86_64", "epoch": null, "name": "xfsprogs", "release": "2.fc36", "source": "rpm", "version": "5.14.2" } ], "xkeyboard-config": [ { "arch": "noarch", "epoch": null, "name": "xkeyboard-config", "release": "1.fc36", "source": "rpm", "version": "2.35.1" } ], "xxd": [ { "arch": "x86_64", "epoch": 2, "name": "xxd", "release": "1.fc36", "source": "rpm", "version": "9.0.1486" } ], "xxhash-libs": [ { "arch": "x86_64", "epoch": null, "name": "xxhash-libs", "release": "2.fc36", "source": "rpm", "version": "0.8.1" } ], "xz": [ { "arch": "x86_64", "epoch": null, "name": "xz", "release": "1.fc36", "source": "rpm", "version": "5.4.1" } ], "xz-devel": [ { "arch": "x86_64", "epoch": null, "name": "xz-devel", "release": "1.fc36", "source": 
"rpm", "version": "5.4.1" } ], "xz-libs": [ { "arch": "x86_64", "epoch": null, "name": "xz-libs", "release": "1.fc36", "source": "rpm", "version": "5.4.1" } ], "yum": [ { "arch": "noarch", "epoch": null, "name": "yum", "release": "1.fc36", "source": "rpm", "version": "4.15.0" } ], "zchunk-libs": [ { "arch": "x86_64", "epoch": null, "name": "zchunk-libs", "release": "1.fc36", "source": "rpm", "version": "1.3.1" } ], "zip": [ { "arch": "x86_64", "epoch": null, "name": "zip", "release": "32.fc36", "source": "rpm", "version": "3.0" } ], "zlib": [ { "arch": "x86_64", "epoch": null, "name": "zlib", "release": "33.fc36", "source": "rpm", "version": "1.2.11" } ], "zlib-devel": [ { "arch": "x86_64", "epoch": null, "name": "zlib-devel", "release": "33.fc36", "source": "rpm", "version": "1.2.11" } ], "zram-generator": [ { "arch": "x86_64", "epoch": null, "name": "zram-generator", "release": "1.fc36", "source": "rpm", "version": "1.1.2" } ], "zram-generator-defaults": [ { "arch": "noarch", "epoch": null, "name": "zram-generator-defaults", "release": "1.fc36", "source": "rpm", "version": "1.1.2" } ] } }, "changed": false } TASK [Set blivet package name] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:28 Thursday 01 June 2023 00:34:56 +0000 (0:00:00.922) 0:00:17.038 ********* ok: [sut] => { "ansible_facts": { "blivet_pkg_name": [ "python3-blivet" ] }, "changed": false } TASK [Set blivet package version] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:33 Thursday 01 June 2023 00:34:56 +0000 (0:00:00.056) 0:00:17.094 ********* ok: [sut] => { "ansible_facts": { "blivet_pkg_version": "3.4.4-1.fc36" }, "changed": false } TASK [Get unused disks] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:55 Thursday 01 June 2023 00:34:56 +0000 (0:00:00.050) 0:00:17.144 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/get_unused_disk.yml for sut TASK [Find unused disks in the system] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/get_unused_disk.yml:2 Thursday 01 June 2023 00:34:56 +0000 (0:00:00.127) 0:00:17.272 ********* ok: [sut] => { "changed": false, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ] } TASK [Set unused_disks if necessary] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/get_unused_disk.yml:10 Thursday 01 June 2023 00:34:57 +0000 (0:00:00.330) 0:00:17.603 ********* ok: [sut] => { "ansible_facts": { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ] }, "changed": false } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/get_unused_disk.yml:15 Thursday 01 June 2023 00:34:57 +0000 (0:00:00.084) 0:00:17.687 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Print unused disks] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/get_unused_disk.yml:20 Thursday 01 June 2023 00:34:57 +0000 (0:00:00.079) 0:00:17.766 ********* ok: [sut] => { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ] } TASK 
[Create volume group 'foo' with 3 PVs] ************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:61 Thursday 01 June 2023 00:34:57 +0000 (0:00:00.080) 0:00:17.847 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:34:57 +0000 (0:00:00.119) 0:00:17.966 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:34:57 +0000 (0:00:00.108) 0:00:18.075 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:34:58 +0000 (0:00:00.406) 0:00:18.482 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:34:58 +0000 (0:00:00.261) 0:00:18.743 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:34:58 +0000 (0:00:00.128) 0:00:18.871 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:34:58 +0000 (0:00:00.077) 0:00:18.949 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:34:58 +0000 (0:00:00.117) 0:00:19.067 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:34:58 +0000 (0:00:00.077) 0:00:19.144 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "name": "foo" } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:34:58 +0000 (0:00:00.079) 0:00:19.223 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:34:58 +0000 (0:00:00.078) 0:00:19.302 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:34:58 +0000 (0:00:00.077) 0:00:19.379 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:34:59 +0000 (0:00:00.077) 0:00:19.457 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:34:59 +0000 (0:00:00.078) 0:00:19.535 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:34:59 +0000 (0:00:00.081) 0:00:19.617 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:34:59 +0000 (0:00:00.097) 0:00:19.715 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:34:59 +0000 (0:00:00.038) 0:00:19.753 ********* changed: [sut] => { "actions": [ { "action": "create format", "device": "/dev/sdi", "fs_type": "disklabel" }, { 
"action": "create device", "device": "/dev/sdi1", "fs_type": null }, { "action": "create format", "device": "/dev/sdi1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdh1", "fs_type": null }, { "action": "create format", "device": "/dev/sdh1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdg1", "fs_type": null }, { "action": "create format", "device": "/dev/sdg1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdf1", "fs_type": null }, { "action": "create format", "device": "/dev/sdf1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sde1", "fs_type": null }, { "action": "create format", "device": "/dev/sde1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdd1", "fs_type": null }, { "action": "create format", "device": "/dev/sdd1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdc1", "fs_type": null }, { "action": "create format", "device": "/dev/sdc1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sda1", "fs_type": null }, { "action": "create format", "device": "/dev/sda1", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/foo" ], "mounts": [], "packages": [ "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:35:10 +0000 (0:00:11.503) 0:00:31.257 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:35:10 +0000 (0:00:00.078) 0:00:31.335 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:35:10 +0000 (0:00:00.036) 0:00:31.372 ********* ok: [sut] => { 
"blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdi1", "fs_type": null }, { "action": "create format", "device": "/dev/sdi1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdh1", "fs_type": null }, { "action": "create format", "device": "/dev/sdh1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdg1", "fs_type": null }, { "action": "create format", "device": "/dev/sdg1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdf1", "fs_type": null }, { "action": "create format", "device": "/dev/sdf1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sde1", "fs_type": null }, { "action": "create format", "device": "/dev/sde1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdd1", "fs_type": null }, { "action": "create format", "device": "/dev/sdd1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdc1", "fs_type": null }, { "action": "create format", "device": "/dev/sdc1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sda1", "fs_type": null }, { "action": "create format", "device": "/dev/sda1", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/foo" ], "mounts": [], "packages": [ "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.081) 0:00:31.453 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK 
[linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.119) 0:00:31.573 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.080) 0:00:31.653 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.037) 0:00:31.691 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.079) 0:00:31.770 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.038) 0:00:31.808 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.037) 0:00:31.846 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.077) 0:00:31.923 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685579192.8006275, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1684244424.757, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131081, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1684244183.529, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3816983141", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.276) 0:00:32.200 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:35:11 +0000 (0:00:00.038) 0:00:32.238 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:69 Thursday 01 June 2023 00:35:12 +0000 (0:00:00.704) 0:00:32.942 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:2 Thursday 01 June 2023 00:35:12 +0000 (0:00:00.160) 0:00:33.103 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:7 Thursday 01 June 2023 00:35:12 +0000 (0:00:00.084) 0:00:33.188 ********* skipping: [sut] => {} TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:15 Thursday 01 June 2023 00:35:12 +0000 (0:00:00.080) 0:00:33.268 ********* ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "FRfV4g-kvn2-05Oi-ffAi-p1la-pUwT-rQWmmR" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdb1", "size": "10G", "type": "partition", "uuid": "WyJamY-6eEb-bka5-IAiL-EQe4-c03j-22SqZs" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdc1", "size": "10G", "type": "partition", "uuid": "xAnCDc-QeTX-duFe-6OEw-B4GU-GdoJ-dkUWe9" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdd1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdd1", "size": "1024G", "type": "partition", "uuid": "xJIwRV-kYEa-eb0v-lEZv-fj0p-eBFr-dKJjUX" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sde1", "size": "1024G", "type": "partition", "uuid": "Wecfyb-GaNU-WeHx-eu6j-osd1-1cJT-be5Xyc" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdf1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdf1", "size": "10G", "type": "partition", "uuid": "tytqd3-Kg3G-09ED-BjS5-9v1F-5oSV-xUD72G" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdg1": { 
"fstype": "LVM2_member", "label": "", "name": "/dev/sdg1", "size": "1024G", "type": "partition", "uuid": "of1Zcf-c157-20UK-v7Wy-qQGR-5E5S-HWpDzp" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdh1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdh1", "size": "10G", "type": "partition", "uuid": "9kmY14-TWjC-ilMb-byLS-eYrh-OzXZ-8DEXWB" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdi1", "size": "10G", "type": "partition", "uuid": "m2UFda-BEYQ-KCKg-kBx1-21wR-CcU8-3EQJjM" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "ext4", "label": "", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "f91a7ec7-5021-4d03-b280-c7f5e8053b5f" }, "/dev/zram0": { "fstype": "", "label": "", "name": "/dev/zram0", "size": "3.6G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:20 Thursday 01 June 2023 00:35:13 +0000 (0:00:00.318) 0:00:33.587 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003343", "end": "2023-06-01 00:35:13.434737", "rc": 0, "start": "2023-06-01 00:35:13.431394" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 16 13:36:23 2023 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=f91a7ec7-5021-4d03-b280-c7f5e8053b5f / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:25 Thursday 01 June 2023 00:35:13 +0000 (0:00:00.313) 0:00:33.900 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003278", "end": "2023-06-01 00:35:13.711394", "failed_when_result": false, "rc": 0, "start": "2023-06-01 00:35:13.708116" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:34 Thursday 01 June 2023 00:35:13 +0000 (0:00:00.278) 0:00:34.179 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:5 Thursday 01 June 2023 00:35:13 +0000 (0:00:00.121) 0:00:34.300 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:18 Thursday 01 June 2023 00:35:13 +0000 (0:00:00.083) 0:00:34.383 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:2 Thursday 01 June 2023 00:35:14 +0000 (0:00:00.204) 0:00:34.588 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "9", "_storage_test_pool_pvs_lvm": [ "/dev/sda1", "/dev/sdb1", "/dev/sdc1", "/dev/sdd1", "/dev/sde1", "/dev/sdf1", "/dev/sdg1", "/dev/sdh1", "/dev/sdi1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:13 Thursday 01 June 2023 00:35:14 +0000 (0:00:00.091) 0:00:34.680 ********* ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda1", "pv": "/dev/sda1" } ok: [sut] => (item=/dev/sdb1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdb1", "pv": "/dev/sdb1" } ok: [sut] => 
(item=/dev/sdc1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdc1", "pv": "/dev/sdc1" } ok: [sut] => (item=/dev/sdd1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdd1", "pv": "/dev/sdd1" } ok: [sut] => (item=/dev/sde1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sde1", "pv": "/dev/sde1" } ok: [sut] => (item=/dev/sdf1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdf1", "pv": "/dev/sdf1" } ok: [sut] => (item=/dev/sdg1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdg1", "pv": "/dev/sdg1" } ok: [sut] => (item=/dev/sdh1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdh1", "pv": "/dev/sdh1" } ok: [sut] => (item=/dev/sdi1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdi1", "pv": "/dev/sdi1" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:22 Thursday 01 June 2023 00:35:16 +0000 (0:00:02.240) 0:00:36.920 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "9" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:27 Thursday 01 June 2023 00:35:16 +0000 (0:00:00.086) 0:00:37.007 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda1", "/dev/sdb1", "/dev/sdc1", "/dev/sdd1", "/dev/sde1", "/dev/sdf1", "/dev/sdg1", "/dev/sdh1", "/dev/sdi1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:33 Thursday 01 June 2023 00:35:16 +0000 (0:00:00.087) 0:00:37.095 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:42 Thursday 01 June 2023 00:35:16 +0000 (0:00:00.090) 0:00:37.185 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:48 Thursday 01 June 2023 00:35:16 +0000 (0:00:00.088) 0:00:37.273 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:54 Thursday 01 June 2023 00:35:17 +0000 (0:00:00.157) 0:00:37.430 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:59 Thursday 01 June 2023 00:35:17 +0000 (0:00:00.099) 0:00:37.529 ********* ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda1" } MSG: All assertions passed ok: [sut] => (item=/dev/sdb1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdb1" } MSG: All assertions passed ok: [sut] => (item=/dev/sdc1) => { "ansible_loop_var": "pv", "changed": false, "pv": 
"/dev/sdc1" } MSG: All assertions passed ok: [sut] => (item=/dev/sdd1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdd1" } MSG: All assertions passed ok: [sut] => (item=/dev/sde1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sde1" } MSG: All assertions passed ok: [sut] => (item=/dev/sdf1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdf1" } MSG: All assertions passed ok: [sut] => (item=/dev/sdg1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdg1" } MSG: All assertions passed ok: [sut] => (item=/dev/sdh1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdh1" } MSG: All assertions passed ok: [sut] => (item=/dev/sdi1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdi1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:73 Thursday 01 June 2023 00:35:17 +0000 (0:00:00.480) 0:00:38.009 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:8 Thursday 01 June 2023 00:35:17 +0000 (0:00:00.121) 0:00:38.131 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:14 Thursday 01 June 2023 00:35:17 +0000 (0:00:00.083) 0:00:38.215 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:21 Thursday 01 June 2023 00:35:17 +0000 (0:00:00.084) 0:00:38.300 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:28 Thursday 01 June 2023 00:35:17 +0000 (0:00:00.081) 0:00:38.381 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:35 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.090) 0:00:38.472 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:45 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.081) 0:00:38.554 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:55 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.082) 0:00:38.637 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:66 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.098) 0:00:38.735 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:76 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.155) 0:00:38.890 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml:2 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.131) 0:00:39.022 ********* TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:79 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.042) 0:00:39.064 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml:2 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.128) 0:00:39.192 ********* TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:82 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.043) 0:00:39.236 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:5 Thursday 01 June 2023 00:35:18 +0000 (0:00:00.127) 0:00:39.363 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:13 Thursday 01 June 2023 00:35:19 +0000 (0:00:00.086) 0:00:39.450 ********* skipping: [sut] => (item=/dev/sda1) => { "_storage_test_pool_member_path": "/dev/sda1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdb1) => { "_storage_test_pool_member_path": "/dev/sdb1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdc1) => { "_storage_test_pool_member_path": "/dev/sdc1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdd1) => { "_storage_test_pool_member_path": "/dev/sdd1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sde1) => { "_storage_test_pool_member_path": "/dev/sde1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, 
"skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdf1) => { "_storage_test_pool_member_path": "/dev/sdf1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdg1) => { "_storage_test_pool_member_path": "/dev/sdg1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdh1) => { "_storage_test_pool_member_path": "/dev/sdh1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdi1) => { "_storage_test_pool_member_path": "/dev/sdi1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:20 Thursday 01 June 2023 00:35:19 +0000 (0:00:00.448) 0:00:39.898 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.544) 0:00:40.443 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.085) 0:00:40.528 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.101) 0:00:40.629 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.083) 0:00:40.712 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.084) 0:00:40.797 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.086) 0:00:40.883 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.086) 0:00:40.970 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.090) 0:00:41.060 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.084) 0:00:41.145 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.079) 0:00:41.225 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:20 +0000 (0:00:00.086) 0:00:41.312 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.152) 0:00:41.464 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.100) 0:00:41.565 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.089) 0:00:41.654 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.091) 0:00:41.746 ********* skipping: [sut] => { 
"changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.082) 0:00:41.829 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.088) 0:00:41.917 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.094) 0:00:42.011 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.085) 0:00:42.097 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.086) 0:00:42.183 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.087) 0:00:42.270 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:21 +0000 (0:00:00.083) 0:00:42.354 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.083) 0:00:42.437 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.078) 0:00:42.516 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.140) 0:00:42.657 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task 
path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.088) 0:00:42.745 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.088) 0:00:42.833 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.084) 0:00:42.918 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.084) 0:00:43.003 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.085) 0:00:43.089 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.080) 0:00:43.169 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.086) 0:00:43.256 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:22 +0000 (0:00:00.088) 0:00:43.344 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.099) 0:00:43.443 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.095) 0:00:43.539 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.084) 0:00:43.623 ********* ok: [sut] => { 
"ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.154) 0:00:43.777 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.088) 0:00:43.866 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.088) 0:00:43.955 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.086) 0:00:44.041 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.103) 0:00:44.145 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.085) 0:00:44.230 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.083) 0:00:44.314 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:23 +0000 (0:00:00.100) 0:00:44.414 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.104) 0:00:44.519 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.121) 0:00:44.640 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.080) 0:00:44.720 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.078) 0:00:44.799 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.079) 0:00:44.879 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.151) 0:00:45.030 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.085) 0:00:45.116 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.082) 0:00:45.198 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.082) 0:00:45.280 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:24 +0000 (0:00:00.082) 0:00:45.363 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:27 Thursday 01 June 2023 00:35:25 +0000 (0:00:00.079) 0:00:45.442 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:85 Thursday 01 June 2023 00:35:25 +0000 (0:00:00.079) 0:00:45.522 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml:2 Thursday 01 June 
2023 00:35:25 +0000 (0:00:00.129) 0:00:45.652 *********

TASK [Clean up test variables] *************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:88
Thursday 01 June 2023 00:35:25 +0000 (0:00:00.040) 0:00:45.692 *********
ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false }

TASK [Verify the volumes] ******************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml:3
Thursday 01 June 2023 00:35:25 +0000 (0:00:00.083) 0:00:45.776 *********

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:44
Thursday 01 June 2023 00:35:25 +0000 (0:00:00.037) 0:00:45.813 *********

TASK [Clean up variable namespace] *********************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:54
Thursday 01 June 2023 00:35:25 +0000 (0:00:00.036) 0:00:45.849 *********
ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false }

TASK [Save UUID of the created volume group] ***********************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:72
Thursday 01 June 2023 00:35:25 +0000 (0:00:00.149) 0:00:45.999 *********
ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.043197", "end": "2023-06-01 00:35:25.840554", "rc": 0, "start": "2023-06-01 00:35:25.797357" }
STDOUT: ryCWJR-aiQG-u1Y7-UAI9-6KSj-Bhpy-1xxGXu

TASK [Verify that nothing changes when disks don't change] *********************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:77
Thursday 01 June 2023 00:35:25 +0000 (0:00:00.121) 0:00:46.308 *********
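
The test now re-applies the same pool over the same nine disks to prove idempotency: the VG UUID saved above should survive, and the role should come back reporting no actions. In sketch form, the invocation being exercised plausibly looks like the following (task layout and variable spelling are illustrative; the tests_lvm_pool_members.yml source itself is not part of this log):

    - name: Verify that nothing changes when disks don't change
      include_role:
        name: linux-system-roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
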
"libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:35:26 +0000 (0:00:00.257) 0:00:47.209 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:35:26 +0000 (0:00:00.084) 0:00:47.294 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:35:26 +0000 (0:00:00.087) 0:00:47.381 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.165) 0:00:47.546 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.086) 0:00:47.633 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "name": "foo" } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.088) 0:00:47.721 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.081) 0:00:47.802 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.083) 0:00:47.886 ********* skipping: [sut] => { "changed": 
false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.083) 0:00:47.969 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.085) 0:00:48.054 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.086) 0:00:48.141 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.100) 0:00:48.242 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:35:27 +0000 (0:00:00.038) 0:00:48.281 ********* ok: [sut] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/foo", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:35:31 +0000 (0:00:03.622) 0:00:51.903 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:35:31 +0000 (0:00:00.099) 0:00:52.003 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:35:31 +0000 (0:00:00.041) 0:00:52.044 ********* ok: [sut] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/foo", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", 
"sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:35:31 +0000 (0:00:00.083) 0:00:52.128 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:35:31 +0000 (0:00:00.081) 0:00:52.210 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:35:31 +0000 (0:00:00.084) 0:00:52.294 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:35:31 +0000 (0:00:00.062) 0:00:52.356 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:35:32 +0000 (0:00:00.083) 0:00:52.440 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:35:32 +0000 (0:00:00.039) 0:00:52.479 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:35:32 +0000 (0:00:00.039) 0:00:52.519 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:35:32 +0000 (0:00:00.080) 0:00:52.600 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685579192.8006275, "attr_flags": "e", "attributes": [ "extents" ], 
"block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1684244424.757, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131081, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1684244183.529, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3816983141", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:35:32 +0000 (0:00:00.273) 0:00:52.873 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:35:32 +0000 (0:00:00.039) 0:00:52.912 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:85 Thursday 01 June 2023 00:35:33 +0000 (0:00:00.692) 0:00:53.605 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:2 Thursday 01 June 2023 00:35:33 +0000 (0:00:00.138) 0:00:53.744 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:7 Thursday 01 June 2023 00:35:33 +0000 (0:00:00.084) 0:00:53.828 ********* skipping: [sut] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:15
Thursday 01 June 2023 00:35:33 +0000 (0:00:00.140) 0:00:53.969 *********
ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "FRfV4g-kvn2-05Oi-ffAi-p1la-pUwT-rQWmmR" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdb1", "size": "10G", "type": "partition", "uuid": "WyJamY-6eEb-bka5-IAiL-EQe4-c03j-22SqZs" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdc1", "size": "10G", "type": "partition", "uuid": "xAnCDc-QeTX-duFe-6OEw-B4GU-GdoJ-dkUWe9" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdd1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdd1", "size": "1024G", "type": "partition", "uuid": "xJIwRV-kYEa-eb0v-lEZv-fj0p-eBFr-dKJjUX" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sde1", "size": "1024G", "type": "partition", "uuid": "Wecfyb-GaNU-WeHx-eu6j-osd1-1cJT-be5Xyc" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdf1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdf1", "size": "10G", "type": "partition", "uuid": "tytqd3-Kg3G-09ED-BjS5-9v1F-5oSV-xUD72G" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdg1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdg1", "size": "1024G", "type": "partition", "uuid": "of1Zcf-c157-20UK-v7Wy-qQGR-5E5S-HWpDzp" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdh1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdh1", "size": "10G", "type": "partition", "uuid": "9kmY14-TWjC-ilMb-byLS-eYrh-OzXZ-8DEXWB" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdi1", "size": "10G", "type": "partition", "uuid": "m2UFda-BEYQ-KCKg-kBx1-21wR-CcU8-3EQJjM" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "ext4", "label": "", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "f91a7ec7-5021-4d03-b280-c7f5e8053b5f" }, "/dev/zram0": { "fstype": "", "label": "", "name": "/dev/zram0", "size": "3.6G", "type": "disk", "uuid": "" } } }
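
Each pool member shows up in this map as an LVM2_member partition (sda1 through sdi1) sitting on its disk, which is what the later PV checks key off. A manual spot-check of one member on the test system could look like this (illustrative only, not part of the test):

    - name: Spot-check one pool member
      command: lsblk --output NAME,FSTYPE,SIZE,TYPE,UUID /dev/sda1
      register: lsblk_out
      changed_when: false
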
"2023-06-01 00:35:34.046403" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 16 13:36:23 2023 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=f91a7ec7-5021-4d03-b280-c7f5e8053b5f / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:25 Thursday 01 June 2023 00:35:34 +0000 (0:00:00.268) 0:00:54.516 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003502", "end": "2023-06-01 00:35:34.320435", "failed_when_result": false, "rc": 0, "start": "2023-06-01 00:35:34.316933" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:34 Thursday 01 June 2023 00:35:34 +0000 (0:00:00.271) 0:00:54.787 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:5 Thursday 01 June 2023 00:35:34 +0000 (0:00:00.119) 0:00:54.907 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:18 Thursday 01 June 2023 00:35:34 +0000 (0:00:00.080) 0:00:54.988 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:2 Thursday 01 June 2023 00:35:34 +0000 (0:00:00.160) 0:00:55.148 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "9", "_storage_test_pool_pvs_lvm": [ "/dev/sda1", "/dev/sdb1", "/dev/sdc1", "/dev/sdd1", "/dev/sde1", "/dev/sdf1", "/dev/sdg1", "/dev/sdh1", "/dev/sdi1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:13 Thursday 01 June 2023 00:35:34 +0000 (0:00:00.087) 0:00:55.235 ********* ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda1", "pv": "/dev/sda1" } ok: [sut] => (item=/dev/sdb1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdb1", "pv": "/dev/sdb1" } ok: [sut] => (item=/dev/sdc1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdc1", "pv": "/dev/sdc1" } ok: [sut] => (item=/dev/sdd1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdd1", "pv": "/dev/sdd1" } ok: [sut] => (item=/dev/sde1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sde1", "pv": "/dev/sde1" } ok: [sut] => (item=/dev/sdf1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdf1", "pv": "/dev/sdf1" } ok: [sut] => (item=/dev/sdg1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdg1", "pv": "/dev/sdg1" } ok: [sut] => (item=/dev/sdh1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdh1", "pv": "/dev/sdh1" } ok: [sut] => (item=/dev/sdi1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdi1", "pv": "/dev/sdi1" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:22 Thursday 01 June 2023 00:35:36 +0000 (0:00:02.093) 0:00:57.329 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "9" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:27 Thursday 01 June 2023 00:35:36 +0000 (0:00:00.082) 0:00:57.411 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda1", "/dev/sdb1", "/dev/sdc1", "/dev/sdd1", "/dev/sde1", "/dev/sdf1", "/dev/sdg1", "/dev/sdh1", "/dev/sdi1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:33 Thursday 01 June 2023 00:35:37 +0000 (0:00:00.085) 0:00:57.497 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:42 Thursday 01 June 2023 00:35:37 +0000 (0:00:00.084) 0:00:57.582 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:48 Thursday 01 June 2023 00:35:37 +0000 (0:00:00.083) 0:00:57.665 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:54 Thursday 01 June 2023 00:35:37 +0000 (0:00:00.086) 0:00:57.752 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: 
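
The three "Set expected pv type" tasks above form a small precedence chain: the expected type defaults to disk, is overridden to partition because this LVM pool puts its PVs on partitions (as the device map showed), and a third override for RAID-backed pools is skipped since raid_level is null. A sketch of the pattern (the when expressions are assumptions; the real ones live in test-verify-pool-members.yml, which this log does not show):

    - name: Set expected pv type
      set_fact:
        _storage_test_expected_pv_type: disk

    - name: Set expected pv type
      set_fact:
        _storage_test_expected_pv_type: partition
      when: storage_test_pool.type == 'lvm'    # assumed condition

    - name: Set expected pv type
      set_fact:
        _storage_test_expected_pv_type: "{{ storage_test_pool.raid_level }}"
      when: storage_test_pool.raid_level       # assumed condition; null here, so skipped
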
TASK [Check the type of each PV] ***********************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:59
Thursday 01 June 2023 00:35:37 +0000 (0:00:00.081) 0:00:57.833 *********
ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda1" }
MSG: All assertions passed
ok: [sut] => (item=/dev/sdb1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdb1" }
MSG: All assertions passed
ok: [sut] => (item=/dev/sdc1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdc1" }
MSG: All assertions passed
ok: [sut] => (item=/dev/sdd1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdd1" }
MSG: All assertions passed
ok: [sut] => (item=/dev/sde1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sde1" }
MSG: All assertions passed
ok: [sut] => (item=/dev/sdf1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdf1" }
MSG: All assertions passed
ok: [sut] => (item=/dev/sdg1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdg1" }
MSG: All assertions passed
ok: [sut] => (item=/dev/sdh1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdh1" }
MSG: All assertions passed
ok: [sut] => (item=/dev/sdi1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdi1" }
MSG: All assertions passed

TASK [Check MD RAID] ***********************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:73
Thursday 01 June 2023 00:35:37 +0000 (0:00:00.481) 0:00:58.315 *********
included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml for sut

TASK [Get information about RAID] **********************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:8
Thursday 01 June 2023 00:35:38 +0000 (0:00:00.172) 0:00:58.487 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Set active devices regex] ************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:14
Thursday 01 June 2023 00:35:38 +0000 (0:00:00.082) 0:00:58.569 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Set spare devices regex] *************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:21
Thursday 01 June 2023 00:35:38 +0000 (0:00:00.080) 0:00:58.650 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Set md version regex] ****************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:28
Thursday 01 June 2023 00:35:38 +0000 (0:00:00.078) 0:00:58.729 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check RAID active devices count] *****************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:35
Thursday 01 June 2023 00:35:38 +0000 (0:00:00.079) 0:00:58.808 *********
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [Check RAID spare devices count] ******************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:45
Thursday 01 June 2023 00:35:38 +0000 (0:00:00.080) 0:00:58.888 *********
skipping: [sut] => { "changed": false,
"skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:55 Thursday 01 June 2023 00:35:38 +0000 (0:00:00.080) 0:00:58.969 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:66 Thursday 01 June 2023 00:35:38 +0000 (0:00:00.080) 0:00:59.050 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:76 Thursday 01 June 2023 00:35:38 +0000 (0:00:00.082) 0:00:59.133 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml:2 Thursday 01 June 2023 00:35:38 +0000 (0:00:00.126) 0:00:59.259 ********* TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:79 Thursday 01 June 2023 00:35:38 +0000 (0:00:00.039) 0:00:59.298 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml:2 Thursday 01 June 2023 00:35:39 +0000 (0:00:00.184) 0:00:59.483 ********* TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:82 Thursday 01 June 2023 00:35:39 +0000 (0:00:00.040) 0:00:59.523 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:5 Thursday 01 June 2023 00:35:39 +0000 (0:00:00.126) 0:00:59.650 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:13 Thursday 01 June 2023 00:35:39 +0000 (0:00:00.081) 0:00:59.731 ********* skipping: [sut] => (item=/dev/sda1) => { "_storage_test_pool_member_path": "/dev/sda1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdb1) => { "_storage_test_pool_member_path": "/dev/sdb1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdc1) => { "_storage_test_pool_member_path": "/dev/sdc1", "ansible_loop_var": 
"_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdd1) => { "_storage_test_pool_member_path": "/dev/sdd1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sde1) => { "_storage_test_pool_member_path": "/dev/sde1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdf1) => { "_storage_test_pool_member_path": "/dev/sdf1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdg1) => { "_storage_test_pool_member_path": "/dev/sdg1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdh1) => { "_storage_test_pool_member_path": "/dev/sdh1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdi1) => { "_storage_test_pool_member_path": "/dev/sdi1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:20 Thursday 01 June 2023 00:35:39 +0000 (0:00:00.450) 0:01:00.182 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:40 +0000 (0:00:00.506) 0:01:00.688 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:40 +0000 (0:00:00.084) 0:01:00.773 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:40 +0000 (0:00:00.137) 0:01:00.910 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing 
device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:40 +0000 (0:00:00.080) 0:01:00.990 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:40 +0000 (0:00:00.079) 0:01:01.070 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:40 +0000 (0:00:00.079) 0:01:01.150 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:40 +0000 (0:00:00.083) 0:01:01.233 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:40 +0000 (0:00:00.082) 0:01:01.315 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:40 +0000 (0:00:00.085) 0:01:01.401 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.080) 0:01:01.481 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.086) 0:01:01.568 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.082) 0:01:01.650 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.080) 0:01:01.731 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.082) 0:01:01.813 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.083) 0:01:01.897 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.164) 0:01:02.061 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.086) 0:01:02.147 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.085) 0:01:02.233 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:41 +0000 (0:00:00.085) 0:01:02.319 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.107) 0:01:02.427 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.088) 0:01:02.515 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.082) 0:01:02.597 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.079) 0:01:02.677 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.080) 0:01:02.758 ********* ok: [sut] => { "ansible_facts": { 
"_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.087) 0:01:02.845 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.085) 0:01:02.931 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.099) 0:01:03.030 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.085) 0:01:03.116 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.127) 0:01:03.243 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.083) 0:01:03.326 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:42 +0000 (0:00:00.088) 0:01:03.415 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.086) 0:01:03.501 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.089) 0:01:03.591 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.082) 0:01:03.674 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.083) 0:01:03.757 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.082) 0:01:03.839 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.083) 0:01:03.923 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.085) 0:01:04.008 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.085) 0:01:04.094 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.085) 0:01:04.179 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.122) 0:01:04.302 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:43 +0000 (0:00:00.101) 0:01:04.403 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.093) 0:01:04.497 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.091) 0:01:04.588 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.102) 0:01:04.690 ********* skipping: [sut] => { 
"changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.121) 0:01:04.812 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.087) 0:01:04.900 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.094) 0:01:04.995 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.095) 0:01:05.091 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.085) 0:01:05.176 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.087) 0:01:05.263 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:35:44 +0000 (0:00:00.086) 0:01:05.350 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.082) 0:01:05.433 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.153) 0:01:05.587 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:27 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.085) 0:01:05.672 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] 
*************************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:85 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.093) 0:01:05.766 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml:2 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.145) 0:01:05.912 ********* TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:88 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.040) 0:01:05.952 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml:3 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.088) 0:01:06.040 ********* TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:44 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.040) 0:01:06.080 ********* TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:54 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.039) 0:01:06.120 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Get UUID of the 'foo' volume group] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:88 Thursday 01 June 2023 00:35:45 +0000 (0:00:00.085) 0:01:06.205 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.044158", "end": "2023-06-01 00:35:46.051773", "rc": 0, "start": "2023-06-01 00:35:46.007615" } STDOUT: ryCWJR-aiQG-u1Y7-UAI9-6KSj-Bhpy-1xxGXu TASK [Make sure the VG UUID didn't change (VG wasn't removed)] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:93 Thursday 01 June 2023 00:35:46 +0000 (0:00:00.313) 0:01:06.519 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Remove 2 PVs from the 'foo' volume group] ******************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:98 Thursday 01 June 2023 00:35:46 +0000 (0:00:00.081) 0:01:06.601 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:35:46 +0000 (0:00:00.217) 0:01:06.819 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:35:46 +0000 (0:00:00.118) 0:01:06.938 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:35:46 +0000 (0:00:00.415) 0:01:07.353 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:35:47 +0000 (0:00:00.252) 0:01:07.606 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:35:47 +0000 (0:00:00.079) 0:01:07.686 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:35:47 +0000 (0:00:00.081) 0:01:07.767 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:35:47 +0000 (0:00:00.127) 0:01:07.894 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:35:47 +0000 (0:00:00.083) 0:01:07.978 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "name": "foo" } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:35:47 +0000 (0:00:00.159) 0:01:08.137 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:35:47 +0000 (0:00:00.082) 0:01:08.219 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:35:47 +0000 (0:00:00.079) 0:01:08.299 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:35:47 +0000 (0:00:00.079) 0:01:08.378 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:35:48 +0000 (0:00:00.081) 0:01:08.460 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:35:48 +0000 (0:00:00.083) 0:01:08.543 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:35:48 +0000 (0:00:00.097) 0:01:08.641 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:35:48 +0000 (0:00:00.037) 0:01:08.678 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/sdi1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdi1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdh1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdh1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdf1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sde1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdd1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": 
"/dev/sdd1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdc1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/foo", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:35:55 +0000 (0:00:07.099) 0:01:15.777 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:35:55 +0000 (0:00:00.085) 0:01:15.863 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:35:55 +0000 (0:00:00.042) 0:01:15.905 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/sdi1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdi1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdh1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdh1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdf1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sde1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdd1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdd1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdc1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/foo", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": 
null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:35:55 +0000 (0:00:00.088) 0:01:15.994 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:35:55 +0000 (0:00:00.123) 0:01:16.117 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:35:55 +0000 (0:00:00.086) 0:01:16.203 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:35:55 +0000 (0:00:00.040) 0:01:16.243 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:35:55 +0000 (0:00:00.083) 0:01:16.327 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:35:55 +0000 (0:00:00.041) 0:01:16.368 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:35:55 +0000 (0:00:00.038) 0:01:16.406 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:35:56 +0000 (0:00:00.080) 0:01:16.487 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685579192.8006275, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1684244424.757, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": 
"root", "inode": 131081, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1684244183.529, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3816983141", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:35:56 +0000 (0:00:00.276) 0:01:16.763 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:35:56 +0000 (0:00:00.040) 0:01:16.804 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:106 Thursday 01 June 2023 00:35:57 +0000 (0:00:00.672) 0:01:17.477 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:2 Thursday 01 June 2023 00:35:57 +0000 (0:00:00.155) 0:01:17.632 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:7 Thursday 01 June 2023 00:35:57 +0000 (0:00:00.087) 0:01:17.719 ********* skipping: [sut] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:15 Thursday 01 June 2023 00:35:57 +0000 (0:00:00.083) 0:01:17.803 ********* ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "FRfV4g-kvn2-05Oi-ffAi-p1la-pUwT-rQWmmR" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "ext4", "label": "", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "f91a7ec7-5021-4d03-b280-c7f5e8053b5f" }, "/dev/zram0": { "fstype": "", "label": "", "name": "/dev/zram0", "size": "3.6G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:20 Thursday 01 June 2023 00:35:57 +0000 (0:00:00.273) 0:01:18.077 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003272", "end": "2023-06-01 00:35:57.878508", "rc": 0, "start": "2023-06-01 00:35:57.875236" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 16 13:36:23 2023 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=f91a7ec7-5021-4d03-b280-c7f5e8053b5f / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:25 Thursday 01 June 2023 00:35:57 +0000 (0:00:00.270) 0:01:18.348 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003319", "end": "2023-06-01 00:35:58.153826", "failed_when_result": false, "rc": 0, "start": "2023-06-01 00:35:58.150507" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:34 Thursday 01 June 2023 00:35:58 +0000 (0:00:00.272) 0:01:18.620 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:5 Thursday 01 June 2023 00:35:58 +0000 (0:00:00.115) 0:01:18.736 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:18 Thursday 01 June 2023 00:35:58 +0000 (0:00:00.078) 0:01:18.814 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:2 Thursday 01 June 2023 00:35:58 +0000 (0:00:00.160) 0:01:18.975 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:13 Thursday 01 June 2023 00:35:58 +0000 (0:00:00.149) 0:01:19.124 ********* ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda1", "pv": "/dev/sda1" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:22 Thursday 01 June 2023 00:35:58 +0000 (0:00:00.269) 0:01:19.394 ********* ok: 
[sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:27 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.081) 0:01:19.476 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:33 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.084) 0:01:19.560 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:42 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.085) 0:01:19.645 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:48 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.081) 0:01:19.727 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:54 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.081) 0:01:19.808 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:59 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.079) 0:01:19.888 ********* ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:73 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.088) 0:01:19.976 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:8 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.131) 0:01:20.108 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:14 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.080) 0:01:20.188 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:21 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.076) 0:01:20.265 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task 
path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:28 Thursday 01 June 2023 00:35:59 +0000 (0:00:00.132) 0:01:20.397 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:35 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.080) 0:01:20.478 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:45 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.077) 0:01:20.555 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:55 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.076) 0:01:20.632 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:66 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.078) 0:01:20.710 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:76 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.078) 0:01:20.788 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml:2 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.118) 0:01:20.906 ********* TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:79 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.038) 0:01:20.945 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml:2 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.118) 0:01:21.064 ********* TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:82 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.036) 0:01:21.101 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:5 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.146) 0:01:21.247 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", 
"_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:13 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.082) 0:01:21.329 ********* skipping: [sut] => (item=/dev/sda1) => { "_storage_test_pool_member_path": "/dev/sda1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:20 Thursday 01 June 2023 00:36:00 +0000 (0:00:00.082) 0:01:21.412 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.116) 0:01:21.528 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.082) 0:01:21.611 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.082) 0:01:21.693 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.078) 0:01:21.772 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.076) 0:01:21.849 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.081) 0:01:21.930 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:27 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.084) 0:01:22.014 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:85 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.083) 0:01:22.097 
********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml:2 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.159) 0:01:22.257 ********* TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:88 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.040) 0:01:22.298 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml:3 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.079) 0:01:22.377 ********* TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:44 Thursday 01 June 2023 00:36:01 +0000 (0:00:00.038) 0:01:22.415 ********* TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:54 Thursday 01 June 2023 00:36:02 +0000 (0:00:00.038) 0:01:22.453 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Get UUID of the 'foo' volume group] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:109 Thursday 01 June 2023 00:36:02 +0000 (0:00:00.085) 0:01:22.539 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.033896", "end": "2023-06-01 00:36:02.369543", "rc": 0, "start": "2023-06-01 00:36:02.335647" } STDOUT: ryCWJR-aiQG-u1Y7-UAI9-6KSj-Bhpy-1xxGXu TASK [Make sure the VG UUID didn't change (VG wasn't removed)] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:114 Thursday 01 June 2023 00:36:02 +0000 (0:00:00.297) 0:01:22.836 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Add the second disk back to the 'foo' volume group] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:119 Thursday 01 June 2023 00:36:02 +0000 (0:00:00.082) 0:01:22.919 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:36:02 +0000 (0:00:00.138) 0:01:23.057 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:36:02 +0000 (0:00:00.113) 0:01:23.171 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:36:03 +0000 (0:00:00.435) 0:01:23.607 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:36:03 +0000 (0:00:00.238) 0:01:23.845 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:36:03 +0000 (0:00:00.078) 0:01:23.924 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:36:03 +0000 (0:00:00.080) 0:01:24.004 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:36:03 +0000 (0:00:00.121) 0:01:24.125 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:36:03 +0000 (0:00:00.080) 0:01:24.206 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda", "sdb" ], "name": "foo" } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:36:03 +0000 (0:00:00.080) 0:01:24.286 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:36:03 +0000 (0:00:00.079) 0:01:24.366 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:36:04 +0000 (0:00:00.078) 0:01:24.444 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:36:04 +0000 (0:00:00.080) 0:01:24.525 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:36:04 +0000 (0:00:00.078) 0:01:24.604 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:36:04 +0000 (0:00:00.121) 0:01:24.726 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:36:04 +0000 (0:00:00.107) 0:01:24.833 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:36:04 +0000 (0:00:00.037) 0:01:24.871 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/sdb1", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/foo", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:36:07 +0000 (0:00:02.915) 0:01:27.786 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:36:07 +0000 (0:00:00.080) 0:01:27.866 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:36:07 +0000 (0:00:00.036) 0:01:27.903 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/sdb1", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/foo", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:36:07 +0000 (0:00:00.107) 0:01:28.010 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:36:07 +0000 (0:00:00.083) 0:01:28.094 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:36:07 +0000 (0:00:00.079) 0:01:28.173 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:36:07 
+0000 (0:00:00.039) 0:01:28.213 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:36:07 +0000 (0:00:00.080) 0:01:28.294 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:36:07 +0000 (0:00:00.041) 0:01:28.335 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:36:07 +0000 (0:00:00.040) 0:01:28.376 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:36:08 +0000 (0:00:00.077) 0:01:28.454 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685579192.8006275, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1684244424.757, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131081, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1684244183.529, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3816983141", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:36:08 +0000 (0:00:00.267) 0:01:28.722 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:36:08 +0000 (0:00:00.036) 0:01:28.758 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:127 Thursday 01 June 2023 00:36:09 +0000 (0:00:00.684) 0:01:29.443 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:2 Thursday 01 June 2023 00:36:09 +0000 (0:00:00.152) 0:01:29.595 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, 
"encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:7 Thursday 01 June 2023 00:36:09 +0000 (0:00:00.086) 0:01:29.681 ********* skipping: [sut] => {} TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:15 Thursday 01 June 2023 00:36:09 +0000 (0:00:00.076) 0:01:29.758 ********* ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "FRfV4g-kvn2-05Oi-ffAi-p1la-pUwT-rQWmmR" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdb1", "size": "10G", "type": "partition", "uuid": "6P9SQA-gfzm-hBMh-3UIW-lsM6-WO9z-ZUgvVx" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "ext4", "label": "", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "f91a7ec7-5021-4d03-b280-c7f5e8053b5f" }, "/dev/zram0": { "fstype": "", "label": "", "name": "/dev/zram0", "size": "3.6G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:20 Thursday 01 June 2023 00:36:09 +0000 (0:00:00.272) 0:01:30.031 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003342", "end": "2023-06-01 00:36:09.831725", "rc": 0, "start": "2023-06-01 00:36:09.828383" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 16 13:36:23 2023 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=f91a7ec7-5021-4d03-b280-c7f5e8053b5f / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:25 Thursday 01 June 2023 00:36:09 +0000 (0:00:00.267) 0:01:30.298 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003308", "end": "2023-06-01 00:36:10.100609", "failed_when_result": false, "rc": 0, "start": "2023-06-01 00:36:10.097301" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:34 Thursday 01 June 2023 00:36:10 +0000 (0:00:00.268) 0:01:30.566 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:5 Thursday 01 June 2023 00:36:10 +0000 (0:00:00.127) 0:01:30.694 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:18 Thursday 01 June 2023 00:36:10 +0000 (0:00:00.083) 0:01:30.777 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:2 Thursday 01 June 2023 00:36:10 +0000 (0:00:00.177) 0:01:30.955 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "2", "_storage_test_pool_pvs_lvm": [ "/dev/sda1", "/dev/sdb1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:13 Thursday 01 June 2023 00:36:10 +0000 (0:00:00.156) 0:01:31.112 ********* ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sda1", "pv": "/dev/sda1" } ok: [sut] => (item=/dev/sdb1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdb1", "pv": "/dev/sdb1" } TASK [Set pvs lvm length] ****************************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:22 Thursday 01 June 2023 00:36:11 +0000 (0:00:00.505) 0:01:31.617 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "2" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:27 Thursday 01 June 2023 00:36:11 +0000 (0:00:00.085) 0:01:31.702 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda1", "/dev/sdb1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:33 Thursday 01 June 2023 00:36:11 +0000 (0:00:00.102) 0:01:31.804 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:42 Thursday 01 June 2023 00:36:11 +0000 (0:00:00.087) 0:01:31.891 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:48 Thursday 01 June 2023 00:36:11 +0000 (0:00:00.081) 0:01:31.973 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:54 Thursday 01 June 2023 00:36:11 +0000 (0:00:00.106) 0:01:32.079 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:59 Thursday 01 June 2023 00:36:11 +0000 (0:00:00.084) 0:01:32.164 ********* ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda1" } MSG: All assertions passed ok: [sut] => (item=/dev/sdb1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdb1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:73 Thursday 01 June 2023 00:36:11 +0000 (0:00:00.133) 0:01:32.297 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:8 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.132) 0:01:32.429 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:14 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.111) 0:01:32.540 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: 
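The member checks above reduce to a simple count assertion: __pvs_lvm_len is derived from the PVs actually found, and it must equal _storage_test_expected_pv_count (2 here, for sda1 and sdb1). A sketch of that check, with the exact condition assumed from the variables in the log:

    - name: Verify PV count (sketch)
      assert:
        that:
          - __pvs_lvm_len | int == _storage_test_expected_pv_count | int
        msg: "unexpected number of PVs in pool 'foo'"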
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:21 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.081) 0:01:32.622 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:28 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.077) 0:01:32.699 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:35 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.078) 0:01:32.778 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:45 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.081) 0:01:32.860 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:55 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.078) 0:01:32.938 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:66 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.080) 0:01:33.018 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:76 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.079) 0:01:33.097 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml:2 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.124) 0:01:33.221 ********* TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:79 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.039) 0:01:33.261 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml:2 Thursday 01 June 2023 00:36:12 +0000 (0:00:00.127) 0:01:33.389 ********* TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:82 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.039) 0:01:33.428 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] 
****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:5 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.189) 0:01:33.618 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:13 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.083) 0:01:33.702 ********* skipping: [sut] => (item=/dev/sda1) => { "_storage_test_pool_member_path": "/dev/sda1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdb1) => { "_storage_test_pool_member_path": "/dev/sdb1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:20 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.124) 0:01:33.826 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.158) 0:01:33.985 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.083) 0:01:34.068 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.079) 0:01:34.148 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.080) 0:01:34.229 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.078) 0:01:34.308 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:36:13 +0000 (0:00:00.077) 0:01:34.385 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set 
variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.079) 0:01:34.465 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.080) 0:01:34.546 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.136) 0:01:34.682 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.079) 0:01:34.761 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.080) 0:01:34.841 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.084) 0:01:34.925 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:27 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.088) 0:01:35.014 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:85 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.085) 0:01:35.099 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml:2 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.129) 0:01:35.229 ********* TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:88 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.040) 0:01:35.269 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] 
****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml:3 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.084) 0:01:35.354 ********* TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:44 Thursday 01 June 2023 00:36:14 +0000 (0:00:00.038) 0:01:35.393 ********* TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:54 Thursday 01 June 2023 00:36:15 +0000 (0:00:00.036) 0:01:35.429 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Get UUID of the 'foo' volume group] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:130 Thursday 01 June 2023 00:36:15 +0000 (0:00:00.085) 0:01:35.515 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.033751", "end": "2023-06-01 00:36:15.383298", "rc": 0, "start": "2023-06-01 00:36:15.349547" } STDOUT: ryCWJR-aiQG-u1Y7-UAI9-6KSj-Bhpy-1xxGXu TASK [Make sure the VG UUID didn't change (VG wasn't removed)] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:135 Thursday 01 June 2023 00:36:15 +0000 (0:00:00.334) 0:01:35.849 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Remove the first PV and add the third disk to the 'foo' volume group] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:140 Thursday 01 June 2023 00:36:15 +0000 (0:00:00.084) 0:01:35.934 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:36:15 +0000 (0:00:00.150) 0:01:36.085 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:36:15 +0000 (0:00:00.116) 0:01:36.201 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:36:16 +0000 (0:00:00.440) 0:01:36.642 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", 
"changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:36:16 +0000 (0:00:00.251) 0:01:36.893 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:36:16 +0000 (0:00:00.079) 0:01:36.973 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:36:16 +0000 (0:00:00.081) 0:01:37.054 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:36:16 +0000 (0:00:00.157) 0:01:37.212 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:36:16 +0000 (0:00:00.082) 0:01:37.294 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sdb", "sdc" ], "name": "foo" } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:36:16 +0000 (0:00:00.084) 0:01:37.379 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:36:17 +0000 (0:00:00.082) 0:01:37.462 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:36:17 +0000 (0:00:00.082) 0:01:37.544 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:36:17 +0000 
(0:00:00.076) 0:01:37.621 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:36:17 +0000 (0:00:00.078) 0:01:37.699 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:36:17 +0000 (0:00:00.078) 0:01:37.778 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:36:17 +0000 (0:00:00.098) 0:01:37.877 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:36:17 +0000 (0:00:00.036) 0:01:37.913 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/sda1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sda1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdc1", "fs_type": null }, { "action": "create format", "device": "/dev/sdc1", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/sdc1", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/foo", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:36:21 +0000 (0:00:03.511) 0:01:41.424 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.079) 0:01:41.504 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.037) 0:01:41.541 ********* ok: 
[sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/sda1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sda1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdc1", "fs_type": null }, { "action": "create format", "device": "/dev/sdc1", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/sdc1", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/foo", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "e2fsprogs", "lvm2" ], "pools": [ { "disks": [ "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.082) 0:01:41.624 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.080) 0:01:41.705 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.080) 0:01:41.785 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.038) 0:01:41.824 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.133) 0:01:41.957 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.040) 0:01:41.998 
********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.038) 0:01:42.036 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.081) 0:01:42.118 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685579192.8006275, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1684244424.757, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131081, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1684244183.529, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3816983141", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:36:21 +0000 (0:00:00.271) 0:01:42.389 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:36:22 +0000 (0:00:00.038) 0:01:42.428 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:149 Thursday 01 June 2023 00:36:22 +0000 (0:00:00.679) 0:01:43.108 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:2 Thursday 01 June 2023 00:36:22 +0000 (0:00:00.123) 0:01:43.231 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sdb", "sdc" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:7 Thursday 01 June 2023 00:36:22 +0000 (0:00:00.081) 0:01:43.313 ********* skipping: [sut] => {} TASK [Collect info about the volumes.] 
***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:15 Thursday 01 June 2023 00:36:22 +0000 (0:00:00.080) 0:01:43.393 ********* ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdb1", "size": "10G", "type": "partition", "uuid": "6P9SQA-gfzm-hBMh-3UIW-lsM6-WO9z-ZUgvVx" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sdc1", "size": "10G", "type": "partition", "uuid": "EGN0eE-EEAU-YADV-dQBq-t3Ov-Cx9T-HyWxtK" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "ext4", "label": "", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "f91a7ec7-5021-4d03-b280-c7f5e8053b5f" }, "/dev/zram0": { "fstype": "", "label": "", "name": "/dev/zram0", "size": "3.6G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:20 Thursday 01 June 2023 00:36:23 +0000 (0:00:00.327) 0:01:43.721 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003357", "end": "2023-06-01 00:36:23.523670", "rc": 0, "start": "2023-06-01 00:36:23.520313" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 16 13:36:23 2023 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=f91a7ec7-5021-4d03-b280-c7f5e8053b5f / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:25 Thursday 01 June 2023 00:36:23 +0000 (0:00:00.269) 0:01:43.991 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003358", "end": "2023-06-01 00:36:23.790798", "failed_when_result": false, "rc": 0, "start": "2023-06-01 00:36:23.787440" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:34 Thursday 01 June 2023 00:36:23 +0000 (0:00:00.266) 0:01:44.257 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:5 Thursday 01 June 2023 00:36:23 +0000 (0:00:00.118) 0:01:44.376 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:18 Thursday 01 June 2023 00:36:24 +0000 (0:00:00.080) 0:01:44.456 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:2 Thursday 01 June 2023 00:36:24 +0000 (0:00:00.163) 0:01:44.619 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "2", "_storage_test_pool_pvs_lvm": [ "/dev/sdb1", "/dev/sdc1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:13 Thursday 01 June 2023 00:36:24 +0000 (0:00:00.087) 0:01:44.706 ********* ok: [sut] => (item=/dev/sdb1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdb1", "pv": "/dev/sdb1" } ok: [sut] => (item=/dev/sdc1) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/sdc1", "pv": "/dev/sdc1" } TASK [Set pvs lvm length] ****************************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:22 Thursday 01 June 2023 00:36:24 +0000 (0:00:00.490) 0:01:45.196 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "2" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:27 Thursday 01 June 2023 00:36:24 +0000 (0:00:00.081) 0:01:45.278 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sdb1", "/dev/sdc1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:33 Thursday 01 June 2023 00:36:24 +0000 (0:00:00.111) 0:01:45.389 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:42 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.085) 0:01:45.475 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:48 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.084) 0:01:45.560 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:54 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.087) 0:01:45.647 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:59 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.084) 0:01:45.732 ********* ok: [sut] => (item=/dev/sdb1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdb1" } MSG: All assertions passed ok: [sut] => (item=/dev/sdc1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sdc1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:73 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.137) 0:01:45.869 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:8 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.133) 0:01:46.003 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:14 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.085) 0:01:46.088 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:21 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.082) 0:01:46.171 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:28 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.077) 0:01:46.248 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:35 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.080) 0:01:46.329 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:45 Thursday 01 June 2023 00:36:25 +0000 (0:00:00.078) 0:01:46.407 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:55 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.112) 0:01:46.520 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:66 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.080) 0:01:46.600 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:76 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.081) 0:01:46.681 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml:2 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.121) 0:01:46.803 ********* TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:79 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.038) 0:01:46.842 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml:2 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.121) 0:01:46.963 ********* TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:82 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.039) 0:01:47.003 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] 
****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:5 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.122) 0:01:47.125 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:13 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.079) 0:01:47.205 ********* skipping: [sut] => (item=/dev/sdb1) => { "_storage_test_pool_member_path": "/dev/sdb1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } skipping: [sut] => (item=/dev/sdc1) => { "_storage_test_pool_member_path": "/dev/sdc1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:20 Thursday 01 June 2023 00:36:26 +0000 (0:00:00.150) 0:01:47.356 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.209) 0:01:47.565 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.085) 0:01:47.651 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.082) 0:01:47.733 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.081) 0:01:47.815 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.081) 0:01:47.897 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.080) 0:01:47.978 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set 
variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.083) 0:01:48.061 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.094) 0:01:48.156 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.085) 0:01:48.241 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.081) 0:01:48.323 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:36:27 +0000 (0:00:00.089) 0:01:48.412 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:36:28 +0000 (0:00:00.079) 0:01:48.491 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:27 Thursday 01 June 2023 00:36:28 +0000 (0:00:00.148) 0:01:48.640 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:85 Thursday 01 June 2023 00:36:28 +0000 (0:00:00.081) 0:01:48.721 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml:2 Thursday 01 June 2023 00:36:28 +0000 (0:00:00.122) 0:01:48.843 ********* TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:88 Thursday 01 June 2023 00:36:28 +0000 (0:00:00.037) 0:01:48.881 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] 
****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml:3 Thursday 01 June 2023 00:36:28 +0000 (0:00:00.080) 0:01:48.961 ********* TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:44 Thursday 01 June 2023 00:36:28 +0000 (0:00:00.041) 0:01:49.003 ********* TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:54 Thursday 01 June 2023 00:36:28 +0000 (0:00:00.042) 0:01:49.046 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Get UUID of the 'foo' volume group] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:152 Thursday 01 June 2023 00:36:28 +0000 (0:00:00.083) 0:01:49.129 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.034769", "end": "2023-06-01 00:36:28.962475", "rc": 0, "start": "2023-06-01 00:36:28.927706" } STDOUT: ryCWJR-aiQG-u1Y7-UAI9-6KSj-Bhpy-1xxGXu TASK [Make sure the VG UUID didn't change (VG wasn't removed)] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:157 Thursday 01 June 2023 00:36:29 +0000 (0:00:00.300) 0:01:49.430 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Create volume group 'foo' with 3 encrypted PVs] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:162 Thursday 01 June 2023 00:36:29 +0000 (0:00:00.082) 0:01:49.513 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:36:29 +0000 (0:00:00.195) 0:01:49.708 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:36:29 +0000 (0:00:00.117) 0:01:49.825 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:36:29 +0000 (0:00:00.412) 0:01:50.238 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", 
"changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.251) 0:01:50.490 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.082) 0:01:50.572 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.080) 0:01:50.653 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.122) 0:01:50.776 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.081) 0:01:50.857 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_password": "yabbadabbadoo", "name": "foo" } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.091) 0:01:50.948 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.084) 0:01:51.033 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.112) 0:01:51.146 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.094) 0:01:51.241 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.089) 0:01:51.330 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:36:30 +0000 (0:00:00.082) 0:01:51.413 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:36:31 +0000 (0:00:00.105) 0:01:51.518 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:36:31 +0000 (0:00:00.038) 0:01:51.557 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdc1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdc1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdi1", "fs_type": null }, { "action": "create format", "device": "/dev/sdi1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdh1", "fs_type": null }, { "action": "create format", "device": "/dev/sdh1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", 
"fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdg1", "fs_type": null }, { "action": "create format", "device": "/dev/sdg1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdf1", "fs_type": null }, { "action": "create format", "device": "/dev/sdf1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sde1", "fs_type": null }, { "action": "create format", "device": "/dev/sde1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdd1", "fs_type": null }, { "action": "create format", "device": "/dev/sdd1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdc1", "fs_type": null }, { "action": "create format", "device": "/dev/sdc1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-9691017d-5141-4325-aac7-42b402e47205", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-9691017d-5141-4325-aac7-42b402e47205", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sda1", "fs_type": null }, { "action": "create format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null } ], "changed": true, "crypts": [ { "backing_device": "/dev/sdi1", "name": "luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "password": "-", "state": "present" }, { "backing_device": "/dev/sdh1", "name": "luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "password": "-", "state": "present" }, { 
"backing_device": "/dev/sdg1", "name": "luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "password": "-", "state": "present" }, { "backing_device": "/dev/sdf1", "name": "luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "password": "-", "state": "present" }, { "backing_device": "/dev/sde1", "name": "luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "password": "-", "state": "present" }, { "backing_device": "/dev/sdd1", "name": "luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "password": "-", "state": "present" }, { "backing_device": "/dev/sdc1", "name": "luks-9691017d-5141-4325-aac7-42b402e47205", "password": "-", "state": "present" }, { "backing_device": "/dev/sdb1", "name": "luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "password": "-", "state": "present" }, { "backing_device": "/dev/sda1", "name": "luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "password": "-", "state": "present" } ], "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/foo" ], "mounts": [], "packages": [ "cryptsetup", "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:37:59 +0000 (0:01:28.738) 0:03:20.295 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:37:59 +0000 (0:00:00.080) 0:03:20.376 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:37:59 +0000 (0:00:00.037) 0:03:20.413 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdc1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdc1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create format", 
"device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdi1", "fs_type": null }, { "action": "create format", "device": "/dev/sdi1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdh1", "fs_type": null }, { "action": "create format", "device": "/dev/sdh1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdg1", "fs_type": null }, { "action": "create format", "device": "/dev/sdg1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdf1", "fs_type": null }, { "action": "create format", "device": "/dev/sdf1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sde1", "fs_type": null }, { "action": "create format", "device": "/dev/sde1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdd1", "fs_type": null }, { "action": "create format", "device": "/dev/sdd1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdc1", "fs_type": null }, { "action": "create format", "device": "/dev/sdc1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-9691017d-5141-4325-aac7-42b402e47205", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-9691017d-5141-4325-aac7-42b402e47205", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "fs_type": null }, { "action": "create format", "device": 
"/dev/mapper/luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sda1", "fs_type": null }, { "action": "create format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null } ], "changed": true, "crypts": [ { "backing_device": "/dev/sdi1", "name": "luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "password": "-", "state": "present" }, { "backing_device": "/dev/sdh1", "name": "luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "password": "-", "state": "present" }, { "backing_device": "/dev/sdg1", "name": "luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "password": "-", "state": "present" }, { "backing_device": "/dev/sdf1", "name": "luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "password": "-", "state": "present" }, { "backing_device": "/dev/sde1", "name": "luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "password": "-", "state": "present" }, { "backing_device": "/dev/sdd1", "name": "luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "password": "-", "state": "present" }, { "backing_device": "/dev/sdc1", "name": "luks-9691017d-5141-4325-aac7-42b402e47205", "password": "-", "state": "present" }, { "backing_device": "/dev/sdb1", "name": "luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "password": "-", "state": "present" }, { "backing_device": "/dev/sda1", "name": "luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "password": "-", "state": "present" } ], "failed": false, "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/foo" ], "mounts": [], "packages": [ "cryptsetup", "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:38:00 +0000 (0:00:00.113) 0:03:20.526 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:38:00 +0000 (0:00:00.081) 0:03:20.607 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, 
"changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:38:00 +0000 (0:00:00.080) 0:03:20.688 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:38:00 +0000 (0:00:00.039) 0:03:20.728 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:38:00 +0000 (0:00:00.081) 0:03:20.809 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:38:00 +0000 (0:00:00.039) 0:03:20.849 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:38:00 +0000 (0:00:00.038) 0:03:20.888 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:38:00 +0000 (0:00:00.080) 0:03:20.969 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685579192.8006275, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1684244424.757, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 131081, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1684244183.529, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "3816983141", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:38:00 +0000 (0:00:00.272) 0:03:21.241 ********* changed: [sut] => (item={'backing_device': '/dev/sdi1', 'name': 'luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdi1", "name": "luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdh1', 'name': 'luks-81c916fb-2009-498e-9df3-b2c9bfe1b010', 'password': '-', 'state': 'present'}) => { 
"ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdh1", "name": "luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdg1', 'name': 'luks-ca09bff7-8815-4e47-b138-4ad04ffc521e', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdg1", "name": "luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdf1', 'name': 'luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdf1", "name": "luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sde1', 'name': 'luks-c5c1c77e-55c2-419b-a471-b32c6082162a', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sde1", "name": "luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdd1', 'name': 'luks-e64ad631-c2b6-4e08-a919-eca7df40a608', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdd1", "name": "luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdc1', 'name': 'luks-9691017d-5141-4325-aac7-42b402e47205', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdc1", "name": "luks-9691017d-5141-4325-aac7-42b402e47205", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdb1', 'name': 'luks-84377a3e-6dc0-4225-8d43-79236f6e9009', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdb1", "name": "luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sda1', 'name': 'luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda1", "name": "luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "password": "-", "state": "present" } } MSG: line added TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:38:03 +0000 (0:00:02.404) 0:03:23.645 ********* ok: [sut] TASK [Save UUID of the created volume group] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:173 Thursday 01 June 2023 00:38:04 +0000 (0:00:00.844) 0:03:24.489 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.039174", "end": "2023-06-01 00:38:04.330630", "rc": 0, "start": "2023-06-01 00:38:04.291456" } STDOUT: j7WAiB-LWYp-rGy3-oQg3-BNfN-ss58-QnUFIG TASK [Remove 2 
PVs from the 'foo' volume group] ******************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:178 Thursday 01 June 2023 00:38:04 +0000 (0:00:00.310) 0:03:24.799 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:38:04 +0000 (0:00:00.156) 0:03:24.956 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:38:04 +0000 (0:00:00.119) 0:03:25.075 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:38:05 +0000 (0:00:00.433) 0:03:25.509 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:38:05 +0000 (0:00:00.250) 0:03:25.760 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:38:05 +0000 (0:00:00.083) 0:03:25.843 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:38:05 +0000 (0:00:00.084) 0:03:25.928 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:38:05 +0000 (0:00:00.124) 0:03:26.053 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:38:05 +0000 (0:00:00.083) 0:03:26.136 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda" ], "encryption": true, "encryption_password": "yabbadabbadoo", "name": "foo" } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:38:05 +0000 (0:00:00.085) 0:03:26.222 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:38:05 +0000 (0:00:00.084) 0:03:26.306 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:38:05 +0000 (0:00:00.083) 0:03:26.390 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:38:06 +0000 (0:00:00.083) 0:03:26.473 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:38:06 +0000 (0:00:00.146) 0:03:26.620 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:38:06 +0000 (0:00:00.086) 0:03:26.706 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:38:06 +0000 (0:00:00.102) 0:03:26.809 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:38:06 +0000 (0:00:00.041) 0:03:26.851 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": 
"/dev/mapper/luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-9691017d-5141-4325-aac7-42b402e47205", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-9691017d-5141-4325-aac7-42b402e47205", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdc1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdd1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdd1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sde1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdf1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdh1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdh1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdi1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdi1", "fs_type": null } ], "changed": true, "crypts": [ { "backing_device": "/dev/sdb1", "name": "luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdc1", "name": "luks-9691017d-5141-4325-aac7-42b402e47205", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdd1", "name": "luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "password": "-", "state": "absent" }, { "backing_device": "/dev/sde1", "name": "luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "password": "-", "state": "absent" }, { "backing_device": 
"/dev/sdf1", "name": "luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdg1", "name": "luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdh1", "name": "luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdi1", "name": "luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "password": "-", "state": "absent" } ], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/foo" ], "mounts": [], "packages": [ "lvm2", "e2fsprogs", "cryptsetup" ], "pools": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:38:14 +0000 (0:00:08.049) 0:03:34.900 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:38:14 +0000 (0:00:00.109) 0:03:35.010 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:38:14 +0000 (0:00:00.039) 0:03:35.049 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-9691017d-5141-4325-aac7-42b402e47205", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-9691017d-5141-4325-aac7-42b402e47205", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdc1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdd1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdd1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde1", "fs_type": "luks" }, { 
"action": "destroy device", "device": "/dev/sde1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdf1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdh1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdh1", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdi1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdi1", "fs_type": null } ], "changed": true, "crypts": [ { "backing_device": "/dev/sdb1", "name": "luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdc1", "name": "luks-9691017d-5141-4325-aac7-42b402e47205", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdd1", "name": "luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "password": "-", "state": "absent" }, { "backing_device": "/dev/sde1", "name": "luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdf1", "name": "luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdg1", "name": "luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdh1", "name": "luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdi1", "name": "luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "password": "-", "state": "absent" } ], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/foo" ], "mounts": [], "packages": [ "lvm2", "e2fsprogs", "cryptsetup" ], "pools": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:38:14 +0000 (0:00:00.091) 
0:03:35.140 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:38:14 +0000 (0:00:00.084) 0:03:35.225 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:38:14 +0000 (0:00:00.084) 0:03:35.310 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:38:14 +0000 (0:00:00.042) 0:03:35.352 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:38:15 +0000 (0:00:00.084) 0:03:35.437 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:38:15 +0000 (0:00:00.041) 0:03:35.478 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:38:15 +0000 (0:00:00.039) 0:03:35.518 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:38:15 +0000 (0:00:00.081) 0:03:35.600 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685579883.1648164, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f54a38a00e8e51ed83b5662801974333f116d592", "ctime": 1685579883.1638165, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 263329, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1685579883.1628163, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 486, "uid": 0, "version": "1166831929", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": 
false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:38:15 +0000 (0:00:00.280) 0:03:35.880 ********* changed: [sut] => (item={'backing_device': '/dev/sdb1', 'name': 'luks-84377a3e-6dc0-4225-8d43-79236f6e9009', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdb1", "name": "luks-84377a3e-6dc0-4225-8d43-79236f6e9009", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdc1', 'name': 'luks-9691017d-5141-4325-aac7-42b402e47205', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdc1", "name": "luks-9691017d-5141-4325-aac7-42b402e47205", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdd1', 'name': 'luks-e64ad631-c2b6-4e08-a919-eca7df40a608', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdd1", "name": "luks-e64ad631-c2b6-4e08-a919-eca7df40a608", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sde1', 'name': 'luks-c5c1c77e-55c2-419b-a471-b32c6082162a', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sde1", "name": "luks-c5c1c77e-55c2-419b-a471-b32c6082162a", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdf1', 'name': 'luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdf1", "name": "luks-7058d75b-a03c-4541-a3e5-b97cf31fcb31", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdg1', 'name': 'luks-ca09bff7-8815-4e47-b138-4ad04ffc521e', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdg1", "name": "luks-ca09bff7-8815-4e47-b138-4ad04ffc521e", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdh1', 'name': 'luks-81c916fb-2009-498e-9df3-b2c9bfe1b010', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdh1", "name": "luks-81c916fb-2009-498e-9df3-b2c9bfe1b010", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdi1', 'name': 'luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdi1", "name": "luks-0531f114-f5dd-48c1-90e6-9b2ac82a6ead", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed TASK [linux-system-roles.storage : Update facts] ******************************* task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:38:17 +0000 (0:00:02.057) 0:03:37.937 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:188 Thursday 01 June 2023 00:38:18 +0000 (0:00:00.677) 0:03:38.615 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:2 Thursday 01 June 2023 00:38:18 +0000 (0:00:00.130) 0:03:38.746 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:7 Thursday 01 June 2023 00:38:18 +0000 (0:00:00.083) 0:03:38.829 ********* skipping: [sut] => {} TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:15 Thursday 01 June 2023 00:38:18 +0000 (0:00:00.077) 0:03:38.907 ********* ok: [sut] => { "changed": false, "info": { "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "size": "10G", "type": "crypt", "uuid": "556frR-by3b-56YK-W5Xh-ar5h-QXLv-YXZrgg" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "64873b1e-1f7f-45fd-8324-4f067b7b29bf" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "ext4", "label": "", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "f91a7ec7-5021-4d03-b280-c7f5e8053b5f" }, "/dev/zram0": { "fstype": "", "label": "", 
"name": "/dev/zram0", "size": "3.6G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:20 Thursday 01 June 2023 00:38:18 +0000 (0:00:00.328) 0:03:39.235 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003354", "end": "2023-06-01 00:38:19.035440", "rc": 0, "start": "2023-06-01 00:38:19.032086" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 16 13:36:23 2023 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=f91a7ec7-5021-4d03-b280-c7f5e8053b5f / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:25 Thursday 01 June 2023 00:38:19 +0000 (0:00:00.267) 0:03:39.502 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003245", "end": "2023-06-01 00:38:19.300629", "failed_when_result": false, "rc": 0, "start": "2023-06-01 00:38:19.297384" } STDOUT: luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf /dev/sda1 - TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:34 Thursday 01 June 2023 00:38:19 +0000 (0:00:00.262) 0:03:39.765 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:5 Thursday 01 June 2023 00:38:19 +0000 (0:00:00.116) 0:03:39.881 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:18 Thursday 01 June 2023 00:38:19 +0000 (0:00:00.082) 0:03:39.964 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:2 Thursday 01 June 2023 00:38:19 +0000 (0:00:00.159) 0:03:40.124 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:13 Thursday 01 June 2023 00:38:19 +0000 (0:00:00.084) 0:03:40.208 ********* ok: [sut] => (item=/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "pv": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:22 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.263) 0:03:40.472 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:27 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.083) 0:03:40.555 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:33 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.112) 0:03:40.667 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:42 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.083) 0:03:40.751 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "crypt" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:48 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.082) 0:03:40.833 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:54 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.078) 0:03:40.912 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:59 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.079) 0:03:40.992 ********* ok: [sut] => (item=/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:73 Thursday 01 
June 2023 00:38:20 +0000 (0:00:00.087) 0:03:41.079 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:8 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.119) 0:03:41.199 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:14 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.075) 0:03:41.275 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:21 Thursday 01 June 2023 00:38:20 +0000 (0:00:00.079) 0:03:41.355 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:28 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.080) 0:03:41.435 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:35 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.078) 0:03:41.513 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:45 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.078) 0:03:41.592 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:55 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.076) 0:03:41.668 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:66 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.110) 0:03:41.779 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:76 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.082) 0:03:41.861 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml:2 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.121) 0:03:41.982 ********* TASK [Check Thin Pools] ******************************************************** task 
path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:79 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.039) 0:03:42.022 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml:2 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.123) 0:03:42.145 ********* TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:82 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.039) 0:03:42.184 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:5 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.128) 0:03:42.313 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:13 Thursday 01 June 2023 00:38:21 +0000 (0:00:00.083) 0:03:42.396 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:38:22 +0000 (0:00:00.117) 0:03:42.513 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", "/dev/disk/by-uuid/64873b1e-1f7f-45fd-8324-4f067b7b29bf" ], "delta": "0:00:00.005305", "end": "2023-06-01 00:38:22.352140", "rc": 0, "start": "2023-06-01 00:38:22.346835" } STDOUT: /dev/sda1 TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:38:22 +0000 (0:00:00.302) 0:03:42.816 ********* changed: [sut] => { "changed": true, "rc": 0, "results": [ "Installed: cryptsetup-2.4.3-2.fc36.x86_64" ] } TASK [Collect LUKS info for this member] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:38:25 +0000 (0:00:03.183) 0:03:45.999 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sda1" ], "delta": "0:00:00.016148", "end": "2023-06-01 00:38:25.817247", "rc": 0, "start": "2023-06-01 00:38:25.801099" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: 64873b1e-1f7f-45fd-8324-4f067b7b29bf Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 723557 Threads: 2 Salt: 3b 97 24 e1 09 05 dc 96 b3 a6 5d d2 12 e2 37 64 fa 54 84 89 30 10 40 f3 00 83 69 77 ec fa c2 d9 AF stripes: 4000 AF 
hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 94160 Salt: 1f 8c f2 bd 63 e9 bf b6 d5 22 18 34 a7 fd 2b 42 54 f2 9f 33 54 c8 a0 1a f1 5b b6 ae 1a c9 60 7c Digest: d6 0d 9b 10 c1 87 bc ff f6 4f c2 b5 fc f9 91 c4 00 a2 32 32 86 35 a7 92 88 a4 d2 83 f7 c7 cb 2c TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:38:25 +0000 (0:00:00.282) 0:03:46.282 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:38:25 +0000 (0:00:00.088) 0:03:46.371 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.082) 0:03:46.453 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:20 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.084) 0:03:46.538 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.171) 0:03:46.710 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf /dev/sda1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.082) 0:03:46.792 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.082) 0:03:46.875 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.084) 0:03:46.959 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.080) 0:03:47.040 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: 
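
(The "Check LUKS version" assertion above runs against the `cryptsetup luksDump` output, which reports a LUKS2 header using aes-xts-plain64 with a 512-bit key and argon2id key derivation. The assertion body itself is not shown in this log; a minimal sketch of such a check:)

```yaml
# Dump the LUKS header of the backing device and assert it is LUKS2,
# matching the pool's encryption_luks_version setting.
- name: Collect LUKS info for this member
  command: cryptsetup luksDump /dev/sda1
  register: luks_dump   # hypothetical name for this sketch
  changed_when: false

- name: Check LUKS version
  assert:
    that:
      - luks_dump.stdout is search('Version:\s+2')
```
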
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.083) 0:03:47.124 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:27 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.080) 0:03:47.204 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:85 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.079) 0:03:47.284 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml:2 Thursday 01 June 2023 00:38:26 +0000 (0:00:00.126) 0:03:47.410 ********* TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:88 Thursday 01 June 2023 00:38:27 +0000 (0:00:00.039) 0:03:47.450 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml:3 Thursday 01 June 2023 00:38:27 +0000 (0:00:00.079) 0:03:47.530 ********* TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:44 Thursday 01 June 2023 00:38:27 +0000 (0:00:00.069) 0:03:47.599 ********* TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:54 Thursday 01 June 2023 00:38:27 +0000 (0:00:00.038) 0:03:47.637 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Get UUID of the 'foo' volume group] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:191 Thursday 01 June 2023 00:38:27 +0000 (0:00:00.081) 0:03:47.719 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.034371", "end": "2023-06-01 00:38:27.549233", "rc": 0, "start": "2023-06-01 00:38:27.514862" } STDOUT: j7WAiB-LWYp-rGy3-oQg3-BNfN-ss58-QnUFIG TASK [Make sure the VG UUID didn't change (VG wasn't removed)] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:196 Thursday 01 June 2023 00:38:27 +0000 (0:00:00.295) 0:03:48.014 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Add the disks back to the 'foo' volume group] **************************** task path: 
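
(The VG UUID comparison above is the core idempotence check of this test: had `foo` been destroyed and recreated rather than modified in place, `vgs` would report a new UUID. A sketch of the pattern, with `pool_uuid` standing in for a fact saved from the earlier run — a hypothetical variable name, not the test's own:)

```yaml
# Re-read the VG UUID after the role run and compare it with the value
# captured before; equality proves the VG itself was never removed.
- name: Get UUID of the 'foo' volume group
  command: vgs --noheading -o vg_uuid foo
  register: vg_uuid_after
  changed_when: false

- name: Make sure the VG UUID didn't change (VG wasn't removed)
  assert:
    that:
      - vg_uuid_after.stdout | trim == pool_uuid | trim
```
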
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:201 Thursday 01 June 2023 00:38:27 +0000 (0:00:00.080) 0:03:48.094 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:38:27 +0000 (0:00:00.152) 0:03:48.247 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:38:27 +0000 (0:00:00.109) 0:03:48.356 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:38:28 +0000 (0:00:00.409) 0:03:48.766 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:38:28 +0000 (0:00:00.279) 0:03:49.046 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:38:28 +0000 (0:00:00.151) 0:03:49.198 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:38:28 +0000 (0:00:00.081) 0:03:49.279 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:38:28 +0000 (0:00:00.125) 0:03:49.405 ********* 
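
(This second role invocation re-adds all nine disks to `foo`; the exact `storage_pools` value the role receives is printed by the "Show storage_pools" task just below. Reconstructed as a play — not the test's literal YAML:)

```yaml
# Same pool name, encryption settings, and password as before, but with
# all nine disks listed, so the role grows the existing VG rather than
# recreating it.
- hosts: all
  vars:
    storage_pools:
      - name: foo
        disks: ["sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi"]
        encryption: true
        encryption_password: yabbadabbadoo
  roles:
    - linux-system-roles.storage
```

Because only the membership changed, the blivet actions that follow are all "create format" / "create device" / "add container member" steps for sdb–sdi; sda and its existing LUKS mapping are left untouched.
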
skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:38:29 +0000 (0:00:00.080) 0:03:49.485 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_password": "yabbadabbadoo", "name": "foo" } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:38:29 +0000 (0:00:00.083) 0:03:49.568 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:38:29 +0000 (0:00:00.082) 0:03:49.651 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:38:29 +0000 (0:00:00.081) 0:03:49.732 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:38:29 +0000 (0:00:00.078) 0:03:49.811 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:38:29 +0000 (0:00:00.082) 0:03:49.893 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:38:29 +0000 (0:00:00.080) 0:03:49.974 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:38:29 +0000 (0:00:00.103) 0:03:50.077 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:38:29 +0000 (0:00:00.036) 0:03:50.114 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdg", 
"fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdi1", "fs_type": null }, { "action": "create format", "device": "/dev/sdi1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": null }, { "action": "create format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdh1", "fs_type": null }, { "action": "create format", "device": "/dev/sdh1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": null }, { "action": "create format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdg1", "fs_type": null }, { "action": "create format", "device": "/dev/sdg1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": null }, { "action": "create format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdf1", "fs_type": null }, { "action": "create format", "device": "/dev/sdf1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": null }, { "action": "create format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sde1", "fs_type": null }, { "action": "create format", "device": "/dev/sde1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": null }, { "action": "create format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdd1", "fs_type": null }, { "action": "create format", "device": "/dev/sdd1", "fs_type": "luks" }, { "action": "create device", 
"device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": null }, { "action": "create format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdc1", "fs_type": null }, { "action": "create format", "device": "/dev/sdc1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": null }, { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": null } ], "changed": true, "crypts": [ { "backing_device": "/dev/sdi1", "name": "luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "password": "-", "state": "present" }, { "backing_device": "/dev/sdh1", "name": "luks-d883cc77-3a08-4195-964a-a36eeeaba914", "password": "-", "state": "present" }, { "backing_device": "/dev/sdg1", "name": "luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "password": "-", "state": "present" }, { "backing_device": "/dev/sdf1", "name": "luks-3cd56d06-12d0-4fad-9377-924c3397d046", "password": "-", "state": "present" }, { "backing_device": "/dev/sde1", "name": "luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "password": "-", "state": "present" }, { "backing_device": "/dev/sdd1", "name": "luks-0186f638-3b3f-4a95-82cc-442546e8c270", "password": "-", "state": "present" }, { "backing_device": "/dev/sdc1", "name": "luks-16934d06-eaad-4d70-befb-6c32fba847bb", "password": "-", "state": "present" }, { "backing_device": "/dev/sdb1", "name": "luks-f3882e28-984d-41d5-82cf-57c1823616b6", "password": "-", "state": "present" } ], "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/foo" ], "mounts": [], "packages": [ "cryptsetup", "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:39:48 +0000 (0:01:18.870) 0:05:08.984 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup 
services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:39:48 +0000 (0:00:00.084) 0:05:09.068 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:39:48 +0000 (0:00:00.039) 0:05:09.108 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdi1", "fs_type": null }, { "action": "create format", "device": "/dev/sdi1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": null }, { "action": "create format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdh1", "fs_type": null }, { "action": "create format", "device": "/dev/sdh1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": null }, { "action": "create format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdg1", "fs_type": null }, { "action": "create format", "device": "/dev/sdg1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": null }, { "action": "create format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdf1", "fs_type": null }, { "action": "create format", "device": "/dev/sdf1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": null }, { "action": "create format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": 
"create device", "device": "/dev/sde1", "fs_type": null }, { "action": "create format", "device": "/dev/sde1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": null }, { "action": "create format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdd1", "fs_type": null }, { "action": "create format", "device": "/dev/sdd1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": null }, { "action": "create format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdc1", "fs_type": null }, { "action": "create format", "device": "/dev/sdc1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": null }, { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "luks" }, { "action": "create device", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": null } ], "changed": true, "crypts": [ { "backing_device": "/dev/sdi1", "name": "luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "password": "-", "state": "present" }, { "backing_device": "/dev/sdh1", "name": "luks-d883cc77-3a08-4195-964a-a36eeeaba914", "password": "-", "state": "present" }, { "backing_device": "/dev/sdg1", "name": "luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "password": "-", "state": "present" }, { "backing_device": "/dev/sdf1", "name": "luks-3cd56d06-12d0-4fad-9377-924c3397d046", "password": "-", "state": "present" }, { "backing_device": "/dev/sde1", "name": "luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "password": "-", "state": "present" }, { "backing_device": "/dev/sdd1", "name": "luks-0186f638-3b3f-4a95-82cc-442546e8c270", "password": "-", "state": "present" }, { "backing_device": "/dev/sdc1", "name": "luks-16934d06-eaad-4d70-befb-6c32fba847bb", "password": "-", "state": "present" }, { "backing_device": "/dev/sdb1", "name": "luks-f3882e28-984d-41d5-82cf-57c1823616b6", "password": "-", "state": "present" } ], "failed": false, "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/foo" ], "mounts": [], "packages": [ "cryptsetup", "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_key": 
null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:39:48 +0000 (0:00:00.114) 0:05:09.223 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:39:48 +0000 (0:00:00.080) 0:05:09.304 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:39:48 +0000 (0:00:00.081) 0:05:09.385 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:39:49 +0000 (0:00:00.039) 0:05:09.424 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:39:49 +0000 (0:00:00.079) 0:05:09.503 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:39:49 +0000 (0:00:00.037) 0:05:09.541 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:39:49 +0000 (0:00:00.039) 0:05:09.581 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:39:49 +0000 (0:00:00.078) 0:05:09.660 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685579897.45782, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": 
"7fef6c1d9b322739b7e9733693005fcf4784d26d", "ctime": 1685579897.4558198, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 263336, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1685579897.45482, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 54, "uid": 0, "version": "3297003748", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:39:49 +0000 (0:00:00.272) 0:05:09.932 ********* changed: [sut] => (item={'backing_device': '/dev/sdi1', 'name': 'luks-0b33f446-abba-4842-8c74-24ef3a1656a0', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdi1", "name": "luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdh1', 'name': 'luks-d883cc77-3a08-4195-964a-a36eeeaba914', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdh1", "name": "luks-d883cc77-3a08-4195-964a-a36eeeaba914", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdg1', 'name': 'luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdg1", "name": "luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdf1', 'name': 'luks-3cd56d06-12d0-4fad-9377-924c3397d046', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdf1", "name": "luks-3cd56d06-12d0-4fad-9377-924c3397d046", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sde1', 'name': 'luks-015dacde-2ae5-47eb-add7-fd80c781bea8', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sde1", "name": "luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdd1', 'name': 'luks-0186f638-3b3f-4a95-82cc-442546e8c270', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdd1", "name": "luks-0186f638-3b3f-4a95-82cc-442546e8c270", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': '/dev/sdc1', 'name': 'luks-16934d06-eaad-4d70-befb-6c32fba847bb', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdc1", "name": "luks-16934d06-eaad-4d70-befb-6c32fba847bb", "password": "-", "state": "present" } } MSG: line added changed: [sut] => (item={'backing_device': 
'/dev/sdb1', 'name': 'luks-f3882e28-984d-41d5-82cf-57c1823616b6', 'password': '-', 'state': 'present'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdb1", "name": "luks-f3882e28-984d-41d5-82cf-57c1823616b6", "password": "-", "state": "present" } } MSG: line added TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:39:51 +0000 (0:00:02.141) 0:05:12.074 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:211 Thursday 01 June 2023 00:39:52 +0000 (0:00:00.725) 0:05:12.800 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:2 Thursday 01 June 2023 00:39:52 +0000 (0:00:00.141) 0:05:12.941 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": "yabbadabbadoo", "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:7 Thursday 01 June 2023 00:39:52 +0000 (0:00:00.087) 0:05:13.029 ********* skipping: [sut] => {} TASK [Collect info about the volumes.] 
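
(The "Collect info about the volumes." task below gathers per-device fstype, label, size, type, and UUID through a module bundled with the test suite. A rough stand-in using stock tooling — hypothetical, not what the test actually runs — would be:)

```yaml
# Approximate the test's internal block-device scan with lsblk,
# emitting the same fields the verification tasks consume.
- name: Collect info about block devices (approximation)
  command: lsblk --json -o NAME,FSTYPE,LABEL,SIZE,TYPE,UUID
  register: blockdev_info
  changed_when: false
```
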
***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:15 Thursday 01 June 2023 00:39:52 +0000 (0:00:00.083) 0:05:13.113 ********* ok: [sut] => { "changed": false, "info": { "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "size": "1024G", "type": "crypt", "uuid": "KQOyNL-G3bp-j7ot-qge9-bTOt-Ll5b-HcHtK7" }, "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "size": "1024G", "type": "crypt", "uuid": "OmJEDN-Kbfn-LSrr-Tfst-m4Mo-J9ak-pxtgHw" }, "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "size": "10G", "type": "crypt", "uuid": "IJsJQ0-W3Gu-1zff-21NX-nuk8-PyIZ-OZ8MxO" }, "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "size": "10G", "type": "crypt", "uuid": "LkEF0B-lWE8-EXD8-Y4Oo-5lsN-zskx-vBzrHO" }, "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "size": "10G", "type": "crypt", "uuid": "C4GZPd-npYh-3fEA-8uQ7-RWf8-puHH-dsgkBS" }, "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "size": "10G", "type": "crypt", "uuid": "556frR-by3b-56YK-W5Xh-ar5h-QXLv-YXZrgg" }, "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "size": "1024G", "type": "crypt", "uuid": "jWHzmG-SV6U-OrBN-Ss2a-9IrK-77a9-3kh70U" }, "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "size": "10G", "type": "crypt", "uuid": "4RDpTR-PIlJ-Fll0-dze2-GH3s-hAhD-mwcJtm" }, "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6": { "fstype": "LVM2_member", "label": "", "name": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "size": "10G", "type": "crypt", "uuid": "D0JnE4-pn8O-jw1C-f5KB-SZsS-REMg-53N4zY" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "64873b1e-1f7f-45fd-8324-4f067b7b29bf" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sdb1", "size": "10G", "type": "partition", "uuid": "f3882e28-984d-41d5-82cf-57c1823616b6" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sdc1", "size": "10G", "type": "partition", "uuid": "16934d06-eaad-4d70-befb-6c32fba847bb" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdd1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sdd1", "size": "1024G", "type": "partition", "uuid": "0186f638-3b3f-4a95-82cc-442546e8c270" }, "/dev/sde": { 
"fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sde1", "size": "1024G", "type": "partition", "uuid": "015dacde-2ae5-47eb-add7-fd80c781bea8" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdf1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sdf1", "size": "10G", "type": "partition", "uuid": "3cd56d06-12d0-4fad-9377-924c3397d046" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdg1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sdg1", "size": "1024G", "type": "partition", "uuid": "c1147fea-f027-4101-8d2c-d41f7dea6f8a" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdh1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sdh1", "size": "10G", "type": "partition", "uuid": "d883cc77-3a08-4195-964a-a36eeeaba914" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi1": { "fstype": "crypto_LUKS", "label": "", "name": "/dev/sdi1", "size": "10G", "type": "partition", "uuid": "0b33f446-abba-4842-8c74-24ef3a1656a0" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "ext4", "label": "", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "f91a7ec7-5021-4d03-b280-c7f5e8053b5f" }, "/dev/zram0": { "fstype": "", "label": "", "name": "/dev/zram0", "size": "3.6G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:20 Thursday 01 June 2023 00:39:52 +0000 (0:00:00.286) 0:05:13.399 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003411", "end": "2023-06-01 00:39:53.206392", "rc": 0, "start": "2023-06-01 00:39:53.202981" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 16 13:36:23 2023 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=f91a7ec7-5021-4d03-b280-c7f5e8053b5f / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:25 Thursday 01 June 2023 00:39:53 +0000 (0:00:00.306) 0:05:13.705 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003316", "end": "2023-06-01 00:39:53.508857", "failed_when_result": false, "rc": 0, "start": "2023-06-01 00:39:53.505541" } STDOUT: luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf /dev/sda1 - luks-0b33f446-abba-4842-8c74-24ef3a1656a0 /dev/sdi1 - luks-d883cc77-3a08-4195-964a-a36eeeaba914 /dev/sdh1 - luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a /dev/sdg1 - luks-3cd56d06-12d0-4fad-9377-924c3397d046 /dev/sdf1 - luks-015dacde-2ae5-47eb-add7-fd80c781bea8 /dev/sde1 - luks-0186f638-3b3f-4a95-82cc-442546e8c270 /dev/sdd1 - luks-16934d06-eaad-4d70-befb-6c32fba847bb /dev/sdc1 - luks-f3882e28-984d-41d5-82cf-57c1823616b6 /dev/sdb1 - TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:34 Thursday 01 June 2023 00:39:53 +0000 (0:00:00.270) 0:05:13.976 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:5 Thursday 01 June 2023 00:39:53 +0000 (0:00:00.124) 0:05:14.101 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:18 Thursday 01 June 2023 00:39:53 +0000 (0:00:00.087) 0:05:14.188 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:2 Thursday 01 June 2023 00:39:53 +0000 (0:00:00.178) 0:05:14.367 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "9", "_storage_test_pool_pvs_lvm": [ "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", 
"/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:13 Thursday 01 June 2023 00:39:54 +0000 (0:00:00.091) 0:05:14.459 ********* ok: [sut] => (item=/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "pv": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8" } ok: [sut] => (item=/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "pv": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270" } ok: [sut] => (item=/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "pv": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0" } ok: [sut] => (item=/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "pv": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb" } ok: [sut] => (item=/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "pv": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046" } ok: [sut] => (item=/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "pv": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf" } ok: [sut] => (item=/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "pv": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a" } ok: [sut] => (item=/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "pv": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914" } ok: [sut] => (item=/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6) => { "ansible_loop_var": "pv", "changed": false, "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "pv": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:22 Thursday 01 June 2023 00:39:56 +0000 (0:00:02.132) 0:05:16.591 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "9" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:27 Thursday 01 June 2023 00:39:56 +0000 (0:00:00.086) 0:05:16.677 ********* ok: [sut] => { 
"ansible_facts": { "_storage_test_pool_pvs": [ "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:33 Thursday 01 June 2023 00:39:56 +0000 (0:00:00.087) 0:05:16.764 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:42 Thursday 01 June 2023 00:39:56 +0000 (0:00:00.088) 0:05:16.852 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "crypt" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:48 Thursday 01 June 2023 00:39:56 +0000 (0:00:00.088) 0:05:16.941 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:54 Thursday 01 June 2023 00:39:56 +0000 (0:00:00.082) 0:05:17.023 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:59 Thursday 01 June 2023 00:39:56 +0000 (0:00:00.083) 0:05:17.107 ********* ok: [sut] => (item=/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8" } MSG: All assertions passed ok: [sut] => (item=/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270" } MSG: All assertions passed ok: [sut] => (item=/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0" } MSG: All assertions passed ok: [sut] => (item=/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb" } MSG: All assertions passed ok: [sut] => (item=/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046" } MSG: All assertions passed ok: [sut] => (item=/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf" } MSG: All assertions passed ok: [sut] => (item=/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a) => { "ansible_loop_var": "pv", 
"changed": false, "pv": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a" } MSG: All assertions passed ok: [sut] => (item=/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914" } MSG: All assertions passed ok: [sut] => (item=/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:73 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.489) 0:05:17.597 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:8 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.152) 0:05:17.749 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:14 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.082) 0:05:17.832 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:21 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.081) 0:05:17.913 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:28 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.083) 0:05:17.996 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:35 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.083) 0:05:18.080 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:45 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.084) 0:05:18.164 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:55 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.084) 0:05:18.249 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:66 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.082) 0:05:18.331 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, 
"storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:76 Thursday 01 June 2023 00:39:57 +0000 (0:00:00.083) 0:05:18.415 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml:2 Thursday 01 June 2023 00:39:58 +0000 (0:00:00.124) 0:05:18.540 ********* TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:79 Thursday 01 June 2023 00:39:58 +0000 (0:00:00.039) 0:05:18.579 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml:2 Thursday 01 June 2023 00:39:58 +0000 (0:00:00.171) 0:05:18.751 ********* TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:82 Thursday 01 June 2023 00:39:58 +0000 (0:00:00.039) 0:05:18.791 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:5 Thursday 01 June 2023 00:39:58 +0000 (0:00:00.129) 0:05:18.920 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:13 Thursday 01 June 2023 00:39:58 +0000 (0:00:00.086) 0:05:19.007 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml for sut TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:39:59 +0000 (0:00:00.488) 0:05:19.495 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", 
"/dev/disk/by-uuid/015dacde-2ae5-47eb-add7-fd80c781bea8" ], "delta": "0:00:00.003189", "end": "2023-06-01 00:39:59.299676", "rc": 0, "start": "2023-06-01 00:39:59.296487" } STDOUT: /dev/sde1 TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:39:59 +0000 (0:00:00.271) 0:05:19.767 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this member] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:40:02 +0000 (0:00:03.630) 0:05:23.397 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sde1" ], "delta": "0:00:00.009646", "end": "2023-06-01 00:40:03.214742", "rc": 0, "start": "2023-06-01 00:40:03.205096" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: 015dacde-2ae5-47eb-add7-fd80c781bea8 Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 718404 Threads: 2 Salt: 25 be 53 d1 47 d8 3d db db 6f de 2a ce b6 7d d1 e9 2f 53 67 e6 ed e6 95 17 35 75 5a 12 d8 b1 2b AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 94025 Salt: 10 96 fb c3 1a f8 af 13 14 a6 2d 11 d9 30 9f 78 20 83 a3 f5 72 c3 8c d2 c7 ed ac 44 fb 93 93 29 Digest: c1 f1 18 b8 06 81 9a 06 9b 11 0b 65 56 32 8d 49 1a 95 c0 47 24 36 04 61 2f 0f a4 d9 69 1d b3 ed TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:40:03 +0000 (0:00:00.287) 0:05:23.684 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:40:03 +0000 (0:00:00.095) 0:05:23.779 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:40:03 +0000 (0:00:00.086) 0:05:23.866 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:40:03 +0000 (0:00:00.083) 0:05:23.950 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", "/dev/disk/by-uuid/0186f638-3b3f-4a95-82cc-442546e8c270" ], "delta": "0:00:00.003319", "end": "2023-06-01 00:40:03.788940", "rc": 0, "start": "2023-06-01 00:40:03.785621" } STDOUT: /dev/sdd1 TASK [Ensure cryptsetup is present] ******************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:40:03 +0000 (0:00:00.305) 0:05:24.255 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this member] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:40:06 +0000 (0:00:02.446) 0:05:26.702 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sdd1" ], "delta": "0:00:00.009610", "end": "2023-06-01 00:40:06.516221", "rc": 0, "start": "2023-06-01 00:40:06.506611" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: 0186f638-3b3f-4a95-82cc-442546e8c270 Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 722396 Threads: 2 Salt: b9 9e d8 80 d8 42 d5 17 4f f2 6c 02 0d cd 7a d0 32 ac 71 85 7d 33 9a 4d bf 0a 8c 9d c3 a9 36 5b AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 94296 Salt: 77 86 d0 d6 3f 49 58 62 9d d4 40 dc 48 9b f5 9e 49 98 57 66 b1 1d f4 25 04 a3 a9 fb ba 0b 86 08 Digest: 8f 30 6e 8c 8b 79 91 a7 79 28 ab a9 b1 18 e4 64 b9 d0 80 a9 04 36 d4 56 5e 93 e3 61 eb 47 dd 68 TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:40:06 +0000 (0:00:00.282) 0:05:26.985 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:40:06 +0000 (0:00:00.093) 0:05:27.078 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:40:06 +0000 (0:00:00.086) 0:05:27.164 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:40:06 +0000 (0:00:00.150) 0:05:27.315 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", "/dev/disk/by-uuid/0b33f446-abba-4842-8c74-24ef3a1656a0" ], "delta": "0:00:00.003339", "end": "2023-06-01 00:40:07.124887", "rc": 0, "start": "2023-06-01 00:40:07.121548" } STDOUT: /dev/sdi1 TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:40:07 +0000 (0:00:00.277) 0:05:27.592 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this member] *************************************** 
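(The task below, repeated once per pool member, is the core of the encryption verification: each /dev/disk/by-uuid symlink has already been resolved to its backing partition with realpath, cryptsetup has been confirmed present, and luksDump then exposes the header fields that the subsequent assertions test. A minimal standalone sketch of the same loop — variable names like member_uuids are illustrative, not the test suite's actual code:

    - hosts: sut
      gather_facts: false
      vars:
        member_uuids:
          - 015dacde-2ae5-47eb-add7-fd80c781bea8   # one UUID from this run; list all nine members in practice
      tasks:
        - name: Resolve the backing device for each member UUID
          command: realpath /dev/disk/by-uuid/{{ item }}
          loop: "{{ member_uuids }}"
          register: backing_devices
          changed_when: false

        - name: Dump the LUKS header of each backing device
          command: cryptsetup luksDump {{ item.stdout }}
          loop: "{{ backing_devices.results }}"
          loop_control:
            label: "{{ item.stdout }}"
          register: luks_dumps
          changed_when: false

        - name: Assert every member uses LUKS2, matching the pool's encryption_luks_version
          assert:
            that:
              - item.stdout is search('Version:\s+2')
          loop: "{{ luks_dumps.results }}"

Note that in this run only the LUKS version is asserted: the key-size and cipher checks are skipped because the pool sets encryption_key_size to 0 and encryption_cipher to null, as shown in the pool information printed earlier.)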
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:40:09 +0000 (0:00:02.462) 0:05:30.055 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sdi1" ], "delta": "0:00:00.009447", "end": "2023-06-01 00:40:09.873416", "rc": 0, "start": "2023-06-01 00:40:09.863969" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: 0b33f446-abba-4842-8c74-24ef3a1656a0 Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 725968 Threads: 2 Salt: fa 73 b7 ce 88 f8 dc e8 fb 79 2f 76 b6 59 b4 79 97 ef c9 5f 39 dd d7 8a c4 22 b9 9c 2a 0c 7c c6 AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 94025 Salt: c5 33 fb c2 11 42 39 ce 31 05 d8 4f 1c 7c 3f b5 64 b8 6a 74 b9 a0 24 73 16 9b 30 f6 6e 7e a8 cc Digest: 9d f3 3d f0 36 8f d2 06 6d 7c 77 52 9f 82 a1 0e 28 dd a4 2c 10 68 ec 5a 4d 12 d2 20 11 6a bc 3f TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:40:09 +0000 (0:00:00.286) 0:05:30.342 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:40:10 +0000 (0:00:00.092) 0:05:30.435 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:40:10 +0000 (0:00:00.154) 0:05:30.589 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:40:10 +0000 (0:00:00.086) 0:05:30.675 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", "/dev/disk/by-uuid/16934d06-eaad-4d70-befb-6c32fba847bb" ], "delta": "0:00:00.003306", "end": "2023-06-01 00:40:10.482466", "rc": 0, "start": "2023-06-01 00:40:10.479160" } STDOUT: /dev/sdc1 TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:40:10 +0000 (0:00:00.274) 0:05:30.950 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this member] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:40:12 +0000 (0:00:02.462) 0:05:33.412 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sdc1" ], "delta": "0:00:00.009597", "end": "2023-06-01 00:40:13.224500", 
"rc": 0, "start": "2023-06-01 00:40:13.214903" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: 16934d06-eaad-4d70-befb-6c32fba847bb Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 725274 Threads: 2 Salt: 79 49 97 30 9e 99 7f b8 0b ce 18 ac 83 e7 10 2e 25 a7 38 54 f0 f7 aa 3d c0 39 3b fe d4 c4 eb b8 AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 93756 Salt: e4 f3 37 f9 27 00 31 23 f6 83 05 26 cf 44 12 15 3a 79 f2 e2 da 24 b0 01 db d6 4f 0a 85 88 7e 02 Digest: 0d 9f fa f8 07 e9 cb b2 3e db 4f 40 7a 8e 19 f3 63 1a d5 50 34 7d cd e5 b6 b6 92 5f e5 27 f0 2b TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:40:13 +0000 (0:00:00.280) 0:05:33.693 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:40:13 +0000 (0:00:00.090) 0:05:33.783 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:40:13 +0000 (0:00:00.113) 0:05:33.897 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:40:13 +0000 (0:00:00.086) 0:05:33.983 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", "/dev/disk/by-uuid/3cd56d06-12d0-4fad-9377-924c3397d046" ], "delta": "0:00:00.003339", "end": "2023-06-01 00:40:13.788829", "rc": 0, "start": "2023-06-01 00:40:13.785490" } STDOUT: /dev/sdf1 TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:40:13 +0000 (0:00:00.273) 0:05:34.257 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this member] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:40:16 +0000 (0:00:02.403) 0:05:36.660 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sdf1" ], "delta": "0:00:00.010133", "end": "2023-06-01 00:40:16.474306", "rc": 0, "start": "2023-06-01 00:40:16.464173" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: 3cd56d06-12d0-4fad-9377-924c3397d046 Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: 
(whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 725274 Threads: 2 Salt: 00 9b e0 f5 2e c6 8a 05 92 98 d3 14 68 22 1f 97 04 00 b8 fa 65 4d 29 77 09 a8 75 5f df 3b a2 41 AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 94025 Salt: f1 e7 61 f5 a6 27 e0 d4 95 4e 5a fa 9c 08 db a2 53 25 9d 4a 08 04 42 db 4e 52 f1 66 56 ad 6a 74 Digest: 7c 42 09 59 83 28 2c 49 62 0a d6 ab 21 40 99 3a cb f3 69 5c 24 01 d1 80 ec ae b9 ba 78 a5 d5 a5 TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:40:16 +0000 (0:00:00.283) 0:05:36.944 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:40:16 +0000 (0:00:00.153) 0:05:37.098 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:40:16 +0000 (0:00:00.087) 0:05:37.185 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:40:16 +0000 (0:00:00.085) 0:05:37.271 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", "/dev/disk/by-uuid/64873b1e-1f7f-45fd-8324-4f067b7b29bf" ], "delta": "0:00:00.003228", "end": "2023-06-01 00:40:17.078823", "rc": 0, "start": "2023-06-01 00:40:17.075595" } STDOUT: /dev/sda1 TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:40:17 +0000 (0:00:00.275) 0:05:37.547 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this member] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:40:19 +0000 (0:00:02.452) 0:05:40.000 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sda1" ], "delta": "0:00:00.009632", "end": "2023-06-01 00:40:19.816674", "rc": 0, "start": "2023-06-01 00:40:19.807042" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: 64873b1e-1f7f-45fd-8324-4f067b7b29bf Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 723557 Threads: 2 Salt: 3b 97 24 e1 09 05 dc 96 b3 a6 5d d2 12 e2 37 64 fa 54 84 89 30 10 40 f3 00 83 69 77 ec fa c2 d9 AF stripes: 
4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 94160 Salt: 1f 8c f2 bd 63 e9 bf b6 d5 22 18 34 a7 fd 2b 42 54 f2 9f 33 54 c8 a0 1a f1 5b b6 ae 1a c9 60 7c Digest: d6 0d 9b 10 c1 87 bc ff f6 4f c2 b5 fc f9 91 c4 00 a2 32 32 86 35 a7 92 88 a4 d2 83 f7 c7 cb 2c TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:40:19 +0000 (0:00:00.316) 0:05:40.317 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:40:19 +0000 (0:00:00.093) 0:05:40.410 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:40:20 +0000 (0:00:00.086) 0:05:40.496 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:40:20 +0000 (0:00:00.087) 0:05:40.583 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", "/dev/disk/by-uuid/c1147fea-f027-4101-8d2c-d41f7dea6f8a" ], "delta": "0:00:00.003313", "end": "2023-06-01 00:40:20.391310", "rc": 0, "start": "2023-06-01 00:40:20.387997" } STDOUT: /dev/sdg1 TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:40:20 +0000 (0:00:00.277) 0:05:40.861 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this member] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:40:22 +0000 (0:00:02.423) 0:05:43.284 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sdg1" ], "delta": "0:00:00.009266", "end": "2023-06-01 00:40:23.138419", "rc": 0, "start": "2023-06-01 00:40:23.129153" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: c1147fea-f027-4101-8d2c-d41f7dea6f8a Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 723557 Threads: 2 Salt: ba 59 4a f3 c8 55 79 01 6e b3 2c 2b 14 a8 89 57 3d 4a d4 1c 4d f5 c4 b6 ca b2 f5 98 b2 65 d6 b1 AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 93756 Salt: 12 d6 31 76 c1 99 0d 0e 29 cb 36 58 0c 62 d9 bc ba 1a fc 06 e3 e3 3f db 91 c4 70 1a 2b 09 95 91 Digest: b3 39 e2 90 2f d8 9b ce 36 7f 3c 1e b2 48 02 94 3e 1e e8 c3 95 eb 4b 
a1 24 23 31 03 47 2a 04 f2 TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:40:23 +0000 (0:00:00.351) 0:05:43.635 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:40:23 +0000 (0:00:00.093) 0:05:43.729 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:40:23 +0000 (0:00:00.087) 0:05:43.817 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:40:23 +0000 (0:00:00.087) 0:05:43.904 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", "/dev/disk/by-uuid/d883cc77-3a08-4195-964a-a36eeeaba914" ], "delta": "0:00:00.003291", "end": "2023-06-01 00:40:23.711960", "rc": 0, "start": "2023-06-01 00:40:23.708669" } STDOUT: /dev/sdh1 TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:40:23 +0000 (0:00:00.276) 0:05:44.180 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this member] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:40:26 +0000 (0:00:02.408) 0:05:46.589 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sdh1" ], "delta": "0:00:00.009687", "end": "2023-06-01 00:40:26.432476", "rc": 0, "start": "2023-06-01 00:40:26.422789" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: d883cc77-3a08-4195-964a-a36eeeaba914 Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 723557 Threads: 2 Salt: e8 32 aa 82 b6 66 5d f5 c8 c6 7b 89 38 ca a3 8b e8 0e ce 34 e0 78 e4 75 dc b0 a9 4a dc 58 34 c9 AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 94025 Salt: 00 69 dd 45 b1 b4 86 73 7e 77 23 3a 85 c7 93 a3 28 1b e2 ca 53 dc be 1b 9c 39 33 c4 1f 4c ce f6 Digest: 22 d3 0d 3b 45 64 33 26 5c 11 b1 59 88 d6 5c ec 16 0d 02 cc b2 a2 31 61 fa 78 8d e9 74 8f 16 c6 TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:40:26 +0000 (0:00:00.310) 0:05:46.899 ********* ok: [sut] => { "changed": false } MSG: 
All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:40:26 +0000 (0:00:00.092) 0:05:46.991 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:40:26 +0000 (0:00:00.085) 0:05:47.076 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get the backing device path] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:2 Thursday 01 June 2023 00:40:26 +0000 (0:00:00.087) 0:05:47.164 ********* ok: [sut] => { "changed": false, "cmd": [ "realpath", "/dev/disk/by-uuid/f3882e28-984d-41d5-82cf-57c1823616b6" ], "delta": "0:00:00.003241", "end": "2023-06-01 00:40:26.968539", "rc": 0, "start": "2023-06-01 00:40:26.965298" } STDOUT: /dev/sdb1 TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12 Thursday 01 June 2023 00:40:27 +0000 (0:00:00.271) 0:05:47.435 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this member] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:17 Thursday 01 June 2023 00:40:29 +0000 (0:00:02.445) 0:05:49.881 ********* ok: [sut] => { "changed": false, "cmd": [ "cryptsetup", "luksDump", "/dev/sdb1" ], "delta": "0:00:00.009506", "end": "2023-06-01 00:40:29.696325", "rc": 0, "start": "2023-06-01 00:40:29.686819" } STDOUT: LUKS header information Version: 2 Epoch: 3 Metadata area: 16384 [bytes] Keyslots area: 16744448 [bytes] UUID: f3882e28-984d-41d5-82cf-57c1823616b6 Label: (no label) Subsystem: (no subsystem) Flags: (no flags) Data segments: 0: crypt offset: 16777216 [bytes] length: (whole device) cipher: aes-xts-plain64 sector: 512 [bytes] Keyslots: 0: luks2 Key: 512 bits Priority: normal Cipher: aes-xts-plain64 Cipher key: 512 bits PBKDF: argon2id Time cost: 4 Memory: 722396 Threads: 2 Salt: 4a 2a db 05 3e b0 13 16 24 50 57 1d a4 38 c3 bb c0 94 0f 7c fc 7c 53 03 31 52 42 88 1d df 13 96 AF stripes: 4000 AF hash: sha256 Area offset:32768 [bytes] Area length:258048 [bytes] Digest ID: 0 Tokens: Digests: 0: pbkdf2 Hash: sha256 Iterations: 93891 Salt: f6 69 89 df 67 12 d2 69 6c ad f8 6c f9 be b3 b3 cf 73 08 ab 63 4d f0 31 3a 7c bf be 94 f0 60 6f Digest: 02 ee c5 fd 95 3b c9 33 9e 82 04 c2 fa 89 1e 29 f3 85 41 ed 72 13 ab ef 53 2a 18 68 0b 39 bd 2e TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:25 Thursday 01 June 2023 00:40:29 +0000 (0:00:00.283) 0:05:50.164 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:38 Thursday 01 June 2023 00:40:29 +0000 (0:00:00.096) 0:05:50.260 ********* skipping: [sut] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:50 Thursday 01 June 2023 00:40:29 +0000 (0:00:00.088) 0:05:50.349 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:20 Thursday 01 June 2023 00:40:30 +0000 (0:00:00.088) 0:05:50.438 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:30 +0000 (0:00:00.527) 0:05:50.965 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-015dacde-2ae5-47eb-add7-fd80c781bea8 /dev/sde1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:30 +0000 (0:00:00.089) 0:05:51.055 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:30 +0000 (0:00:00.090) 0:05:51.145 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:30 +0000 (0:00:00.089) 0:05:51.234 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:30 +0000 (0:00:00.157) 0:05:51.392 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.090) 0:05:51.483 ********* ok: [sut] => { "ansible_facts": { 
"_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.085) 0:05:51.568 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-0186f638-3b3f-4a95-82cc-442546e8c270 /dev/sdd1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.089) 0:05:51.657 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.086) 0:05:51.744 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.088) 0:05:51.832 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.084) 0:05:51.917 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.088) 0:05:52.006 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.087) 0:05:52.093 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-0b33f446-abba-4842-8c74-24ef3a1656a0 /dev/sdi1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.091) 0:05:52.185 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.087) 0:05:52.273 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:31 +0000 (0:00:00.089) 0:05:52.362 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.169) 0:05:52.531 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.091) 0:05:52.623 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.085) 0:05:52.709 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-16934d06-eaad-4d70-befb-6c32fba847bb /dev/sdc1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.090) 0:05:52.799 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.088) 0:05:52.888 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.089) 0:05:52.977 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.085) 0:05:53.063 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.089) 0:05:53.152 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.089) 0:05:53.242 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-3cd56d06-12d0-4fad-9377-924c3397d046 /dev/sdf1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:32 +0000 (0:00:00.090) 0:05:53.332 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.086) 
0:05:53.419 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.091) 0:05:53.510 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.179) 0:05:53.690 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.099) 0:05:53.789 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.086) 0:05:53.875 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf /dev/sda1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.095) 0:05:53.971 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.091) 0:05:54.062 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.099) 0:05:54.162 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.086) 0:05:54.248 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:33 +0000 (0:00:00.090) 0:05:54.338 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.093) 0:05:54.432 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a /dev/sdg1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] 
******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.092) 0:05:54.524 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.090) 0:05:54.615 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.106) 0:05:54.721 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.148) 0:05:54.870 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.090) 0:05:54.960 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.086) 0:05:55.046 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-d883cc77-3a08-4195-964a-a36eeeaba914 /dev/sdh1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.090) 0:05:55.137 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.089) 0:05:55.226 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.089) 0:05:55.316 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:34 +0000 (0:00:00.082) 0:05:55.398 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.087) 0:05:55.486 ********* ok: [sut] => { 
"ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.088) 0:05:55.574 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [ "luks-f3882e28-984d-41d5-82cf-57c1823616b6 /dev/sdb1 -" ] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.094) 0:05:55.668 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.087) 0:05:55.755 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.088) 0:05:55.844 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.146) 0:05:55.990 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.092) 0:05:56.082 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:27 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.087) 0:05:56.170 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:85 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.085) 0:05:56.255 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml:2 Thursday 01 June 2023 00:40:35 +0000 (0:00:00.136) 0:05:56.392 ********* TASK [Clean up test variables] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:88 Thursday 01 June 2023 00:40:36 +0000 (0:00:00.041) 0:05:56.433 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, 
"changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml:3 Thursday 01 June 2023 00:40:36 +0000 (0:00:00.085) 0:05:56.518 ********* TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:44 Thursday 01 June 2023 00:40:36 +0000 (0:00:00.047) 0:05:56.566 ********* TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:54 Thursday 01 June 2023 00:40:36 +0000 (0:00:00.040) 0:05:56.606 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Get UUID of the 'foo' volume group] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:214 Thursday 01 June 2023 00:40:36 +0000 (0:00:00.082) 0:05:56.689 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.039787", "end": "2023-06-01 00:40:36.530822", "rc": 0, "start": "2023-06-01 00:40:36.491035" } STDOUT: j7WAiB-LWYp-rGy3-oQg3-BNfN-ss58-QnUFIG TASK [Make sure the VG UUID didn't change (VG wasn't removed)] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:219 Thursday 01 June 2023 00:40:36 +0000 (0:00:00.362) 0:05:57.051 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Create a new volume group with a logical volume] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:224 Thursday 01 June 2023 00:40:36 +0000 (0:00:00.086) 0:05:57.138 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:40:36 +0000 (0:00:00.179) 0:05:57.318 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:40:37 +0000 (0:00:00.116) 0:05:57.434 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:40:37 +0000 (0:00:00.413) 0:05:57.847 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ 
"/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:40:37 +0000 (0:00:00.250) 0:05:58.098 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:40:37 +0000 (0:00:00.085) 0:05:58.184 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:40:37 +0000 (0:00:00.085) 0:05:58.269 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.159) 0:05:58.428 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.083) 0:05:58.512 ********* ok: [sut] => { "storage_pools": [ { "disks": "sda", "encryption": false, "name": "foo", "volumes": [ { "name": "test", "size": "20m" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.085) 0:05:58.597 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.082) 0:05:58.679 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.082) 0:05:58.762 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure 
required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.082) 0:05:58.845 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.082) 0:05:58.927 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.082) 0:05:59.009 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.105) 0:05:59.115 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:40:38 +0000 (0:00:00.040) 0:05:59.156 ********* changed: [sut] => { "actions": [ { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdc1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sda1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdh1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdh1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdi1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdi1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "disklabel" 
}, { "action": "destroy format", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdd1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdd1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdf1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sde1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sda1", "fs_type": null }, { "action": "create format", "device": "/dev/sda1", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sdc1", "name": "luks-16934d06-eaad-4d70-befb-6c32fba847bb", "password": "-", "state": "absent" }, { "backing_device": "/dev/sda1", "name": "luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdh1", "name": "luks-d883cc77-3a08-4195-964a-a36eeeaba914", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdi1", "name": "luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdg1", "name": "luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdb1", "name": "luks-f3882e28-984d-41d5-82cf-57c1823616b6", "password": "-", "state": "absent" }, { "backing_device": 
"/dev/sdd1", "name": "luks-0186f638-3b3f-4a95-82cc-442546e8c270", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdf1", "name": "luks-3cd56d06-12d0-4fad-9377-924c3397d046", "password": "-", "state": "absent" }, { "backing_device": "/dev/sde1", "name": "luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "password": "-", "state": "absent" } ], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/mapper/foo-test" ], "mounts": [], "packages": [ "e2fsprogs", "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "test", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:40:48 +0000 (0:00:09.272) 0:06:08.429 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:40:48 +0000 (0:00:00.084) 0:06:08.513 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:40:48 +0000 (0:00:00.038) 0:06:08.552 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-16934d06-eaad-4d70-befb-6c32fba847bb", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdc1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdc", 
"fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sda1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-d883cc77-3a08-4195-964a-a36eeeaba914", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdh1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdh1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdi1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdi1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdg1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-f3882e28-984d-41d5-82cf-57c1823616b6", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-0186f638-3b3f-4a95-82cc-442546e8c270", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdd1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdd1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-3cd56d06-12d0-4fad-9377-924c3397d046", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf1", "fs_type": "luks" }, { "action": "destroy device", "device": "/dev/sdf1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "disklabel" }, { "action": "destroy format", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/mapper/luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde1", "fs_type": "luks" }, { 
"action": "destroy device", "device": "/dev/sde1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "disklabel" }, { "action": "create format", "device": "/dev/sda", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sda1", "fs_type": null }, { "action": "create format", "device": "/dev/sda1", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/foo", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/foo-test", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/foo-test", "fs_type": "xfs" } ], "changed": true, "crypts": [ { "backing_device": "/dev/sdc1", "name": "luks-16934d06-eaad-4d70-befb-6c32fba847bb", "password": "-", "state": "absent" }, { "backing_device": "/dev/sda1", "name": "luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdh1", "name": "luks-d883cc77-3a08-4195-964a-a36eeeaba914", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdi1", "name": "luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdg1", "name": "luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdb1", "name": "luks-f3882e28-984d-41d5-82cf-57c1823616b6", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdd1", "name": "luks-0186f638-3b3f-4a95-82cc-442546e8c270", "password": "-", "state": "absent" }, { "backing_device": "/dev/sdf1", "name": "luks-3cd56d06-12d0-4fad-9377-924c3397d046", "password": "-", "state": "absent" }, { "backing_device": "/dev/sde1", "name": "luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "password": "-", "state": "absent" } ], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0", "/dev/mapper/foo-test" ], "mounts": [], "packages": [ "e2fsprogs", "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "test", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for 
test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:40:48 +0000 (0:00:00.091) 0:06:08.643 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "test", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:40:48 +0000 (0:00:00.140) 0:06:08.783 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:40:48 +0000 (0:00:00.085) 0:06:08.868 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:40:48 +0000 (0:00:00.040) 0:06:08.909 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:40:48 +0000 (0:00:00.085) 0:06:08.995 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:40:48 +0000 (0:00:00.042) 0:06:09.038 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 
00:40:48 +0000 (0:00:00.042) 0:06:09.080 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:40:48 +0000 (0:00:00.084) 0:06:09.165 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685579991.5868354, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ce88942f73cdbd601cde6da8f3c0c5644a2ae17c", "ctime": 1685579991.5858352, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 263400, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0600", "mtime": 1685579991.5848353, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 486, "uid": 0, "version": "672474299", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:40:49 +0000 (0:00:00.282) 0:06:09.447 ********* changed: [sut] => (item={'backing_device': '/dev/sdc1', 'name': 'luks-16934d06-eaad-4d70-befb-6c32fba847bb', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdc1", "name": "luks-16934d06-eaad-4d70-befb-6c32fba847bb", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sda1', 'name': 'luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sda1", "name": "luks-64873b1e-1f7f-45fd-8324-4f067b7b29bf", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdh1', 'name': 'luks-d883cc77-3a08-4195-964a-a36eeeaba914', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdh1", "name": "luks-d883cc77-3a08-4195-964a-a36eeeaba914", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdi1', 'name': 'luks-0b33f446-abba-4842-8c74-24ef3a1656a0', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdi1", "name": "luks-0b33f446-abba-4842-8c74-24ef3a1656a0", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdg1', 'name': 'luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdg1", "name": "luks-c1147fea-f027-4101-8d2c-d41f7dea6f8a", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdb1', 'name': 
'luks-f3882e28-984d-41d5-82cf-57c1823616b6', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdb1", "name": "luks-f3882e28-984d-41d5-82cf-57c1823616b6", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdd1', 'name': 'luks-0186f638-3b3f-4a95-82cc-442546e8c270', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdd1", "name": "luks-0186f638-3b3f-4a95-82cc-442546e8c270", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sdf1', 'name': 'luks-3cd56d06-12d0-4fad-9377-924c3397d046', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sdf1", "name": "luks-3cd56d06-12d0-4fad-9377-924c3397d046", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed changed: [sut] => (item={'backing_device': '/dev/sde1', 'name': 'luks-015dacde-2ae5-47eb-add7-fd80c781bea8', 'password': '-', 'state': 'absent'}) => { "ansible_loop_var": "entry", "backup": "", "changed": true, "entry": { "backing_device": "/dev/sde1", "name": "luks-015dacde-2ae5-47eb-add7-fd80c781bea8", "password": "-", "state": "absent" }, "found": 1 } MSG: 1 line(s) removed TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:40:51 +0000 (0:00:02.308) 0:06:11.755 ********* ok: [sut] TASK [Save UUID of the created volume group] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:236 Thursday 01 June 2023 00:40:52 +0000 (0:00:00.682) 0:06:12.438 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.034678", "end": "2023-06-01 00:40:52.273588", "rc": 0, "start": "2023-06-01 00:40:52.238910" } STDOUT: LxOaHe-7dyy-dG1i-T8vh-8lBp-GyH3-eV2Ikx TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:241 Thursday 01 June 2023 00:40:52 +0000 (0:00:00.303) 0:06:12.741 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:2 Thursday 01 June 2023 00:40:52 +0000 (0:00:00.143) 0:06:12.884 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": 0, "encryption_luks_version": "luks2", "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": 
[], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "", "mount_user": null, "name": "test", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:7 Thursday 01 June 2023 00:40:52 +0000 (0:00:00.097) 0:06:12.981 ********* skipping: [sut] => {} TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:15 Thursday 01 June 2023 00:40:52 +0000 (0:00:00.083) 0:06:13.065 ********* ok: [sut] => { "changed": false, "info": { "/dev/mapper/foo-test": { "fstype": "xfs", "label": "", "name": "/dev/mapper/foo-test", "size": "20M", "type": "lvm", "uuid": "d95162d9-afd8-42c5-a906-9f3b413491e4" }, "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sda1": { "fstype": "LVM2_member", "label": "", "name": "/dev/sda1", "size": "10G", "type": "partition", "uuid": "9XlYQk-ANvy-Tcxs-eJyN-uriz-J6tP-xQVwic" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "ext4", "label": "", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "f91a7ec7-5021-4d03-b280-c7f5e8053b5f" }, "/dev/zram0": { "fstype": "", "label": "", "name": "/dev/zram0", "size": "3.6G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:20 Thursday 01 June 2023 00:40:52 +0000 (0:00:00.277) 0:06:13.343 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003296", "end": "2023-06-01 00:40:53.175639", "rc": 0, "start": "2023-06-01 00:40:53.172343" } STDOUT: # # /etc/fstab # 
Created by anaconda on Tue May 16 13:36:23 2023 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=f91a7ec7-5021-4d03-b280-c7f5e8053b5f / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:25 Thursday 01 June 2023 00:40:53 +0000 (0:00:00.299) 0:06:13.642 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003210", "end": "2023-06-01 00:40:53.440872", "failed_when_result": false, "rc": 0, "start": "2023-06-01 00:40:53.437662" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:34 Thursday 01 June 2023 00:40:53 +0000 (0:00:00.264) 0:06:13.907 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:5 Thursday 01 June 2023 00:40:53 +0000 (0:00:00.115) 0:06:14.023 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:18 Thursday 01 June 2023 00:40:53 +0000 (0:00:00.079) 0:06:14.102 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:2 Thursday 01 June 2023 00:40:53 +0000 (0:00:00.160) 0:06:14.262 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "1", "_storage_test_pool_pvs_lvm": [ "/dev/sda1" ] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:13 Thursday 01 June 2023 00:40:53 +0000 (0:00:00.089) 0:06:14.352 ********* ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", 
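
[Annotation] The fstab/crypttab readbacks above use a plain "cat" with failure tolerated, so verification can proceed even when cleanup has left the file empty. A sketch of the crypttab read, with the register name assumed:

- name: Read the /etc/crypttab file
  command: cat /etc/crypttab
  register: storage_test_crypttab   # assumed register name
  changed_when: false
  failed_when: false                # tolerate an empty or missing file
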
"changed": false, "device": "/dev/sda1", "pv": "/dev/sda1" } TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:22 Thursday 01 June 2023 00:40:54 +0000 (0:00:00.268) 0:06:14.620 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "1" }, "changed": false } TASK [Set pool pvs] ************************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:27 Thursday 01 June 2023 00:40:54 +0000 (0:00:00.082) 0:06:14.702 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [ "/dev/sda1" ] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:33 Thursday 01 June 2023 00:40:54 +0000 (0:00:00.083) 0:06:14.785 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:42 Thursday 01 June 2023 00:40:54 +0000 (0:00:00.081) 0:06:14.867 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:48 Thursday 01 June 2023 00:40:54 +0000 (0:00:00.135) 0:06:15.002 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:54 Thursday 01 June 2023 00:40:54 +0000 (0:00:00.084) 0:06:15.087 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:59 Thursday 01 June 2023 00:40:54 +0000 (0:00:00.078) 0:06:15.165 ********* ok: [sut] => (item=/dev/sda1) => { "ansible_loop_var": "pv", "changed": false, "pv": "/dev/sda1" } MSG: All assertions passed TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:73 Thursday 01 June 2023 00:40:54 +0000 (0:00:00.089) 0:06:15.255 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:8 Thursday 01 June 2023 00:40:54 +0000 (0:00:00.116) 0:06:15.371 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:14 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.078) 0:06:15.450 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:21 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.079) 0:06:15.530 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:28 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.083) 0:06:15.613 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:35 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.081) 0:06:15.695 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:45 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.079) 0:06:15.774 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:55 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.080) 0:06:15.854 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:66 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.080) 0:06:15.935 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:76 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.151) 0:06:16.086 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml:2 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.119) 0:06:16.206 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-lvmraid.yml for sut TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-lvmraid.yml:8 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.120) 0:06:16.326 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-lvmraid.yml:16 Thursday 01 June 2023 00:40:55 +0000 (0:00:00.078) 0:06:16.405 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-lvmraid.yml:21 Thursday 01 June 2023 
00:40:56 +0000 (0:00:00.077) 0:06:16.483 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set LV stripe size] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-lvmraid.yml:29 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.080) 0:06:16.563 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested stripe size] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-lvmraid.yml:34 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.080) 0:06:16.644 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected stripe size] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-lvmraid.yml:40 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.080) 0:06:16.725 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check stripe size] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-lvmraid.yml:46 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.079) 0:06:16.805 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:79 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.080) 0:06:16.885 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml:2 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.157) 0:06:17.043 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-thin.yml for sut TASK [Get information about thinpool] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-thin.yml:8 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.120) 0:06:17.163 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in correct thinpool (when thinp name is provided)] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-thin.yml:16 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.079) 0:06:17.243 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check that volume is in thinpool (when thinp name is not provided)] ****** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-thin.yml:23 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.080) 0:06:17.323 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-thin.yml:27 Thursday 01 June 2023 00:40:56 +0000 (0:00:00.077) 0:06:17.401 ********* ok: [sut] => { "ansible_facts": { "storage_test_thin_status": null }, "changed": false } TASK [Check 
member encryption] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:82 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.079) 0:06:17.481 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:5 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.121) 0:06:17.602 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:13 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.082) 0:06:17.684 ********* skipping: [sut] => (item=/dev/sda1) => { "_storage_test_pool_member_path": "/dev/sda1", "ansible_loop_var": "_storage_test_pool_member_path", "changed": false, "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:20 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.083) 0:06:17.768 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml for sut TASK [Set variables used by tests] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:2 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.116) 0:06:17.884 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [] }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:9 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.166) 0:06:18.051 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:18 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.085) 0:06:18.136 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:27 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.081) 0:06:18.218 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:37 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.078) 0:06:18.297 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-crypttab.yml:47 Thursday 01 June 2023 00:40:57 +0000 (0:00:00.081) 0:06:18.378 ********* ok: [sut] => { "ansible_facts": { 
"_storage_test_crypttab_entries": null }, "changed": false } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:27 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.080) 0:06:18.458 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:85 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.080) 0:06:18.539 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml for sut TASK [Validate pool member VDO settings] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml:2 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.125) 0:06:18.664 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-vdo.yml for sut TASK [Get information about VDO deduplication] ********************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-vdo.yml:9 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.128) 0:06:18.793 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-vdo.yml:16 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.082) 0:06:18.875 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-vdo.yml:22 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.078) 0:06:18.954 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get information about VDO compression] *********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-vdo.yml:28 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.162) 0:06:19.117 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is off] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-vdo.yml:35 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.082) 0:06:19.199 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check if VDO deduplication is on] **************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-vdo.yml:41 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.077) 0:06:19.276 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-vdo.yml:47 Thursday 01 June 2023 00:40:58 +0000 (0:00:00.077) 0:06:19.354 ********* ok: [sut] => { "ansible_facts": { "storage_test_vdo_status": null }, "changed": false } TASK [Clean up test variables] 
************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:88 Thursday 01 June 2023 00:40:59 +0000 (0:00:00.077) 0:06:19.432 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml:3 Thursday 01 June 2023 00:40:59 +0000 (0:00:00.081) 0:06:19.513 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume.yml for sut TASK [Set storage volume test variables] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume.yml:2 Thursday 01 June 2023 00:40:59 +0000 (0:00:00.115) 0:06:19.629 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for {{ storage_test_volume_subset }}] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume.yml:21 Thursday 01 June 2023 00:40:59 +0000 (0:00:00.083) 0:06:19.712 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-fstab.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-fs.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-device.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-md.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-cache.yml for sut TASK [Get expected mount device based on device type] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:7 Thursday 01 June 2023 00:40:59 +0000 (0:00:00.501) 0:06:20.213 ********* ok: [sut] => { "ansible_facts": { "storage_test_device_path": "/dev/mapper/foo-test" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:16 Thursday 01 June 2023 00:40:59 +0000 (0:00:00.083) 0:06:20.297 ********* ok: [sut] => { "ansible_facts": { "storage_test_mount_device_matches": [], "storage_test_mount_expected_match_count": "0", "storage_test_mount_point_matches": [], "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:33 Thursday 01 June 2023 00:40:59 +0000 (0:00:00.085) 0:06:20.382 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: 
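
The mount checks that follow hinge on the facts just set: the expected device is /dev/mapper/foo-test and, since the volume was created with no mount point, every expected match count is "0". A minimal equivalent of the by-device check (task and variable names are illustrative) would filter the gathered mount facts:

    - hosts: all
      tasks:
        - name: Collect live mounts of the test device
          set_fact:
            test_mount_matches: "{{ ansible_mounts | selectattr('device', 'equalto', '/dev/mapper/foo-test') | list }}"

        - name: Expect no mount entry for an unmounted volume
          assert:
            that:
              - test_mount_matches | length == 0
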
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:46 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.079) 0:06:20.461 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by mount point] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:58 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.078) 0:06:20.540 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:66 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.087) 0:06:20.627 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:78 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.082) 0:06:20.710 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:90 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.078) 0:06:20.789 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the mount fs type] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:105 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.078) 0:06:20.868 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:117 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.078) 0:06:20.946 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:123 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.077) 0:06:21.024 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:129 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.077) 0:06:21.101 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-mount.yml:141 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.077) 0:06:21.178 ********* ok: [sut] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_device_matches": null, "storage_test_mount_expected_match_count": null, "storage_test_mount_point_matches": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] 
*********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-fstab.yml:2 Thursday 01 June 2023 00:41:00 +0000 (0:00:00.077) 0:06:21.256 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-fstab.yml:40 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.164) 0:06:21.420 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-fstab.yml:48 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.082) 0:06:21.503 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-fstab.yml:58 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.082) 0:06:21.585 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-fstab.yml:71 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.079) 0:06:21.664 ********* ok: [sut] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-fs.yml:3 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.079) 0:06:21.744 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify fs label] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-fs.yml:10 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.079) 0:06:21.824 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [See whether the device node is present] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-device.yml:3 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.082) 0:06:21.906 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685580047.8878407, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1685580047.8878407, "dev": 5, "device_type": 64768, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 5612, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1685580047.8878407, "nlink": 1, "path": "/dev/mapper/foo-test", 
"pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-device.yml:9 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.273) 0:06:22.180 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-device.yml:16 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.083) 0:06:22.263 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-device.yml:24 Thursday 01 June 2023 00:41:01 +0000 (0:00:00.081) 0:06:22.345 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-device.yml:30 Thursday 01 June 2023 00:41:02 +0000 (0:00:00.086) 0:06:22.432 ********* ok: [sut] => { "ansible_facts": { "st_volume_type": "lvm" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-device.yml:34 Thursday 01 June 2023 00:41:02 +0000 (0:00:00.084) 0:06:22.517 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-device.yml:39 Thursday 01 June 2023 00:41:02 +0000 (0:00:00.116) 0:06:22.633 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:3 Thursday 01 June 2023 00:41:02 +0000 (0:00:00.081) 0:06:22.715 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:10 Thursday 01 June 2023 00:41:02 +0000 (0:00:00.078) 0:06:22.793 ********* ok: [sut] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Collect LUKS info for this volume] *************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:15 Thursday 01 June 2023 00:41:04 +0000 (0:00:02.371) 0:06:25.165 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:21 Thursday 01 June 2023 00:41:04 +0000 (0:00:00.081) 0:06:25.246 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify that the raw 
device is the same as the device if not encrypted] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:30 Thursday 01 June 2023 00:41:04 +0000 (0:00:00.079) 0:06:25.326 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:43 Thursday 01 June 2023 00:41:04 +0000 (0:00:00.085) 0:06:25.411 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:49 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.079) 0:06:25.491 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:54 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.122) 0:06:25.613 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:67 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.079) 0:06:25.693 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:79 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.081) 0:06:25.775 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:92 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.080) 0:06:25.855 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:104 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.084) 0:06:25.940 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:112 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.083) 0:06:26.023 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:120 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.084) 0:06:26.108 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: 
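
Both crypttab passes (per pool member earlier, per volume here) expect zero entries and a "-" key file, because nothing in this scenario is encrypted. The check amounts to counting /etc/crypttab lines that reference the device; a rough standalone version follows, with an illustrative grep pattern rather than the role's actual matching logic:

    - hosts: all
      tasks:
        - name: Count crypttab lines mentioning the volume
          command: grep -c foo-test /etc/crypttab
          register: crypttab_matches
          changed_when: false
          failed_when: crypttab_matches.rc not in [0, 1]   # rc 1 only means zero matches

        - name: Expect no crypttab entry for an unencrypted volume
          assert:
            that:
              - crypttab_matches.stdout | int == 0
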
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:129 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.084) 0:06:26.192 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-encryption.yml:138 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.084) 0:06:26.276 ********* ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-md.yml:8 Thursday 01 June 2023 00:41:05 +0000 (0:00:00.082) 0:06:26.359 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-md.yml:14 Thursday 01 June 2023 00:41:06 +0000 (0:00:00.079) 0:06:26.438 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-md.yml:21 Thursday 01 June 2023 00:41:06 +0000 (0:00:00.078) 0:06:26.516 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-md.yml:28 Thursday 01 June 2023 00:41:06 +0000 (0:00:00.079) 0:06:26.596 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-md.yml:35 Thursday 01 June 2023 00:41:06 +0000 (0:00:00.152) 0:06:26.749 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-md.yml:44 Thursday 01 June 2023 00:41:06 +0000 (0:00:00.080) 0:06:26.830 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-md.yml:53 Thursday 01 June 2023 00:41:06 +0000 (0:00:00.081) 0:06:26.911 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:3 Thursday 01 June 2023 00:41:06 +0000 (0:00:00.079) 0:06:26.990 ********* ok: [sut] => { "bytes": 20971520, "changed": false, "lvm": "20m", "parted": "20MiB", "size": "20 MiB" } TASK [Parse the requested size of the volume] ********************************** task path: 
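
The size comparison coming up is exact arithmetic: the requested "20m" is 20 MiB = 20 x 1024 x 1024 = 20,971,520 bytes, which matches the parsed actual size above. Assuming Ansible's 1024-based human_to_bytes filter accepts the role's lowercase suffix, the same comparison can be written directly:

    - hosts: all
      tasks:
        - name: Confirm the requested size parses to the reported byte count
          assert:
            that:
              # '20m' is read as 20 MiB here; lowercase-suffix handling is assumed
              - "('20m' | human_to_bytes) == 20971520"
              - "20 * 1024 * 1024 == 20971520"
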
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:11 Thursday 01 June 2023 00:41:06 +0000 (0:00:00.313) 0:06:27.304 ********* ok: [sut] => { "bytes": 20971520, "changed": false, "lvm": "20m", "parted": "20MiB", "size": "20 MiB" } TASK [Establish base value for expected size] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:20 Thursday 01 June 2023 00:41:07 +0000 (0:00:00.263) 0:06:27.567 ********* ok: [sut] => { "ansible_facts": { "storage_test_expected_size": "20971520" }, "changed": false } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:28 Thursday 01 June 2023 00:41:07 +0000 (0:00:00.084) 0:06:27.652 ********* ok: [sut] => { "storage_test_expected_size": "20971520" } TASK [Get the size of parent/pool device] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:32 Thursday 01 June 2023 00:41:07 +0000 (0:00:00.080) 0:06:27.733 ********* ok: [sut] => { "bytes": 10715943403, "changed": false, "lvm": "9g", "parted": "9GiB", "size": "9 GiB" } TASK [Show test pool] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:46 Thursday 01 June 2023 00:41:07 +0000 (0:00:00.262) 0:06:27.996 ********* skipping: [sut] => {} TASK [Show test blockinfo] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:50 Thursday 01 June 2023 00:41:07 +0000 (0:00:00.088) 0:06:28.084 ********* skipping: [sut] => {} TASK [Show test pool size] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:54 Thursday 01 June 2023 00:41:07 +0000 (0:00:00.086) 0:06:28.171 ********* skipping: [sut] => {} TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:58 Thursday 01 June 2023 00:41:07 +0000 (0:00:00.172) 0:06:28.343 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:68 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.081) 0:06:28.425 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:72 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.080) 0:06:28.506 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:77 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.081) 0:06:28.587 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:83 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.084) 0:06:28.671 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:88 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.093) 0:06:28.765 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:96 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.084) 0:06:28.850 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:104 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.097) 0:06:28.948 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:109 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.085) 0:06:29.033 ********* skipping: [sut] => {} TASK [Show volume thin pool size] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:113 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.084) 0:06:29.117 ********* skipping: [sut] => {} TASK [Show test volume size] *************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:117 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.078) 0:06:29.195 ********* skipping: [sut] => {} TASK [Establish base value for expected thin pool size] ************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:121 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.079) 0:06:29.275 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:129 Thursday 01 June 2023 00:41:08 +0000 (0:00:00.078) 0:06:29.354 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:138 Thursday 01 June 2023 00:41:09 +0000 (0:00:00.080) 0:06:29.434 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:142 Thursday 01 June 2023 00:41:09 +0000 (0:00:00.114) 0:06:29.548 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:150 Thursday 01 June 2023 00:41:09 +0000 (0:00:00.078) 0:06:29.627 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:156 Thursday 01 June 2023 00:41:09 +0000 (0:00:00.080) 0:06:29.707 ********* ok: [sut] => { "storage_test_actual_size": { "bytes": 20971520, "changed": false, "failed": false, "lvm": "20m", "parted": "20MiB", "size": "20 MiB" } } TASK [Show expected size] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:160 Thursday 01 June 2023 00:41:09 +0000 (0:00:00.081) 0:06:29.788 ********* ok: [sut] => { "storage_test_expected_size": "20971520" } TASK [Assert expected size is actual size] ************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-size.yml:164 Thursday 01 June 2023 00:41:09 +0000 (0:00:00.079) 0:06:29.868 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Get information about the LV] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-cache.yml:5 Thursday 01 June 2023 00:41:09 +0000 (0:00:00.084) 0:06:29.952 ********* ok: [sut] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--nameprefixes", "--units=b", "--nosuffix", "--unquoted", "-o", "name,attr,cache_total_blocks,chunk_size,segtype", "foo/test" ], "delta": "0:00:00.036634", "end": "2023-06-01 00:41:09.791616", "rc": 0, "start": "2023-06-01 00:41:09.754982" } STDOUT: LVM2_LV_NAME=test LVM2_LV_ATTR=-wi-a----- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear TASK [Set LV segment type] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-cache.yml:13 Thursday 01 June 2023 00:41:09 +0000 (0:00:00.305) 0:06:30.258 ********* ok: [sut] => { "ansible_facts": { "storage_test_lv_segtype": [ "linear" ] }, "changed": false } TASK [Check segment type] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-cache.yml:18 Thursday 01 June 2023 00:41:09 +0000 (0:00:00.082) 0:06:30.341 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set LV cache size] ******************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-cache.yml:27 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.081) 0:06:30.423 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-cache.yml:35 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.081) 0:06:30.505 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-cache.yml:41 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.079) 0:06:30.585 ********* skipping: [sut] => { "changed": 
false, "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume-cache.yml:47 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.081) 0:06:30.666 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-volume.yml:27 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.152) 0:06:30.819 ********* ok: [sut] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:44 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.082) 0:06:30.901 ********* TASK [Clean up variable namespace] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:54 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.037) 0:06:30.938 ********* ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Add a second PV to the VG] *********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:244 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.080) 0:06:31.019 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.166) 0:06:31.186 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:41:10 +0000 (0:00:00.107) 0:06:31.293 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:41:11 +0000 (0:00:00.407) 0:06:31.701 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": 
"Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:41:11 +0000 (0:00:00.289) 0:06:31.990 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:41:11 +0000 (0:00:00.083) 0:06:32.073 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:41:11 +0000 (0:00:00.167) 0:06:32.240 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:41:11 +0000 (0:00:00.124) 0:06:32.365 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:41:12 +0000 (0:00:00.082) 0:06:32.447 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda", "sdb" ], "name": "foo", "volumes": [ { "name": "test", "size": "20m" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:41:12 +0000 (0:00:00.084) 0:06:32.531 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:41:12 +0000 (0:00:00.081) 0:06:32.613 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:41:12 +0000 (0:00:00.081) 0:06:32.694 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:41:12 +0000 (0:00:00.078) 0:06:32.773 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:41:12 +0000 (0:00:00.079) 0:06:32.852 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:41:12 +0000 (0:00:00.079) 0:06:32.931 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:41:12 +0000 (0:00:00.097) 0:06:33.028 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:41:12 +0000 (0:00:00.039) 0:06:33.068 ********* changed: [sut] => { "actions": [ { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/sdb1", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/foo-test", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "xfsprogs", "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:41:15 +0000 
(0:00:02.660) 0:06:35.729 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:41:15 +0000 (0:00:00.081) 0:06:35.810 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:41:15 +0000 (0:00:00.038) 0:06:35.848 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sdb", "fs_type": "disklabel" }, { "action": "create device", "device": "/dev/sdb1", "fs_type": null }, { "action": "create format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "add container member", "device": "/dev/sdb1", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/foo-test", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "xfsprogs", "lvm2", "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:41:15 +0000 (0:00:00.088) 0:06:35.937 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": 
"/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:41:15 +0000 (0:00:00.086) 0:06:36.024 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:41:15 +0000 (0:00:00.083) 0:06:36.107 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:41:15 +0000 (0:00:00.042) 0:06:36.149 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:41:15 +0000 (0:00:00.081) 0:06:36.230 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:41:15 +0000 (0:00:00.072) 0:06:36.303 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:41:15 +0000 (0:00:00.041) 0:06:36.344 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:41:16 +0000 (0:00:00.084) 0:06:36.429 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685580053.4398413, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", 
"ctime": 1685580051.2728412, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 263404, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1685580051.2718413, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "621089841", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:41:16 +0000 (0:00:00.271) 0:06:36.700 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:41:16 +0000 (0:00:00.038) 0:06:36.739 ********* ok: [sut] TASK [Get UUID of the 'foo' volume group] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:255 Thursday 01 June 2023 00:41:16 +0000 (0:00:00.670) 0:06:37.410 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.035950", "end": "2023-06-01 00:41:17.244344", "rc": 0, "start": "2023-06-01 00:41:17.208394" } STDOUT: LxOaHe-7dyy-dG1i-T8vh-8lBp-GyH3-eV2Ikx TASK [Make sure the VG UUID didn't change (VG wasn't removed)] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:260 Thursday 01 June 2023 00:41:17 +0000 (0:00:00.300) 0:06:37.711 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Remove the first PV from the VG] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:265 Thursday 01 June 2023 00:41:17 +0000 (0:00:00.081) 0:06:37.792 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:41:17 +0000 (0:00:00.207) 0:06:38.000 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:41:17 +0000 (0:00:00.111) 0:06:38.111 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:41:18 +0000 (0:00:00.405) 0:06:38.516 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], 
"_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:41:18 +0000 (0:00:00.250) 0:06:38.767 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:41:18 +0000 (0:00:00.081) 0:06:38.848 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:41:18 +0000 (0:00:00.081) 0:06:38.930 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:41:18 +0000 (0:00:00.119) 0:06:39.049 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:41:18 +0000 (0:00:00.079) 0:06:39.129 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sdb" ], "name": "foo", "volumes": [ { "name": "test", "size": "20m" } ] } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:41:18 +0000 (0:00:00.082) 0:06:39.211 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:41:18 +0000 (0:00:00.080) 0:06:39.292 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:41:19 +0000 (0:00:00.150) 0:06:39.443 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:41:19 +0000 (0:00:00.081) 0:06:39.524 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:41:19 +0000 (0:00:00.077) 0:06:39.602 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:41:19 +0000 (0:00:00.077) 0:06:39.679 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:41:19 +0000 (0:00:00.097) 0:06:39.777 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:41:19 +0000 (0:00:00.037) 0:06:39.815 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/sda1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sda1", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/mapper/foo-test", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "e2fsprogs", "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test", 
"raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:41:23 +0000 (0:00:03.637) 0:06:43.452 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.080) 0:06:43.533 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.037) 0:06:43.570 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/sda1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sda1", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/mapper/foo-test", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "e2fsprogs", "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.087) 0:06:43.657 ********* ok: [sut] => { "ansible_facts": { 
"_storage_pools_list": [ { "disks": [ "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/foo-test", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/foo-test", "_raw_device": "/dev/mapper/foo-test", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": null, "mount_user": null, "name": "test", "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "20m", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.081) 0:06:43.738 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.081) 0:06:43.820 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.039) 0:06:43.860 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.077) 0:06:43.937 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.045) 0:06:43.982 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.041) 0:06:44.024 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.078) 0:06:44.103 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685580053.4398413, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1685580051.2728412, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 263404, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1685580051.2718413, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "621089841", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:41:23 +0000 (0:00:00.299) 0:06:44.403 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:41:24 +0000 (0:00:00.039) 0:06:44.442 ********* ok: [sut] TASK [Get UUID of the 'foo' volume group] ************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:276 Thursday 01 June 2023 00:41:24 +0000 (0:00:00.680) 0:06:45.123 ********* ok: [sut] => { "changed": false, "cmd": [ "vgs", "--noheading", "-o", "vg_uuid", "foo" ], "delta": "0:00:00.036780", "end": "2023-06-01 00:41:24.956681", "rc": 0, "start": "2023-06-01 00:41:24.919901" } STDOUT: LxOaHe-7dyy-dG1i-T8vh-8lBp-GyH3-eV2Ikx TASK [Make sure the VG UUID didn't change (VG wasn't removed)] ***************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:281 Thursday 01 June 2023 00:41:25 +0000 (0:00:00.298) 0:06:45.422 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Clean up] **************************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:286 Thursday 01 June 2023 00:41:25 +0000 (0:00:00.079) 0:06:45.501 ********* TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:2 Thursday 01 June 2023 00:41:25 +0000 (0:00:00.169) 0:06:45.671 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml for sut TASK [linux-system-roles.storage : Ensure ansible_facts used by role] ********** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:2 Thursday 01 June 2023 00:41:25 +0000 (0:00:00.110) 0:06:45.782 ********* ok: [sut] TASK [linux-system-roles.storage : Set platform/version specific variables] **** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/set_vars.yml:8 Thursday 01 June 2023 00:41:25 +0000 (0:00:00.440) 0:06:46.223 ********* skipping: [sut] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml", "skip_reason": "Conditional result was False" } ok: [sut] => (item=Fedora.yml) => { "ansible_facts": { "_storage_copr_packages": [ { "packages": [ "vdo", "kmod-vdo" ], "repository": "rhawalsh/dm-vdo" } ], "_storage_copr_support_packages": [ "dnf-plugins-core" ], "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap" ] }, "ansible_included_var_files": [ "/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/vars/Fedora.yml" ], "ansible_loop_var": "item", "changed": false, "item": "Fedora.yml" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } skipping: [sut] => (item=Fedora_36.yml) => { "ansible_loop_var": "item", "changed": false, "item": "Fedora_36.yml", "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Define an empty list of pools to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:5 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.241) 0:06:46.464 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [linux-system-roles.storage : Define an empty list of volumes to be used in testing] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:9 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.082) 0:06:46.546 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Include the appropriate provider tasks] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main.yml:13 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.085) 0:06:46.631 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml for sut TASK [linux-system-roles.storage : Make sure blivet is available] ************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.126) 0:06:46.758 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Show storage_pools] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:9 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.087) 0:06:46.846 ********* ok: [sut] => { "storage_pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "name": "foo", "state": "absent" } ] } TASK [linux-system-roles.storage : Show storage_volumes] *********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:14 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.086) 0:06:46.933 ********* ok: [sut] => { "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined" } TASK [linux-system-roles.storage : 
Get required packages] ********************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:19 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.092) 0:06:47.025 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Enable copr repositories if needed] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:32 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.079) 0:06:47.105 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Make sure required packages are installed] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:39 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.081) 0:06:47.187 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Get service facts] ************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:46 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.079) 0:06:47.267 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set storage_cryptsetup_services] ************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:53 Thursday 01 June 2023 00:41:26 +0000 (0:00:00.080) 0:06:47.347 ********* ok: [sut] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [linux-system-roles.storage : Mask the systemd cryptsetup services] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:67 Thursday 01 June 2023 00:41:27 +0000 (0:00:00.131) 0:06:47.479 ********* TASK [linux-system-roles.storage : Manage the pools and volumes to match the specified state] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73 Thursday 01 June 2023 00:41:27 +0000 (0:00:00.037) 0:06:47.516 ********* changed: [sut] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/foo-test", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [] } ], "volumes": [] } 
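The "Manage the pools and volumes" result above is the cleanup pass: with the pool requested as state: absent, the actions list shows blivet's teardown order, destroying the xfs format first, then the foo-test LV, the foo VG, the lvmpv format on /dev/sdb1, the sdb1 partition, and finally sdb's disklabel, so volumes are always dismantled before the container that holds them. Reconstructed from the "Show storage_pools" output earlier in this play, the invocation amounts to roughly the following sketch (illustrative only, not the verbatim test file, and the way the role is pulled in may differ):

    # Sketch only: the storage_pools structure is reconstructed from the
    # "Show storage_pools" output above; the actual test file and its way
    # of invoking the role are not shown in this log.
    - name: Clean up the 'foo' pool and everything in it
      include_role:
        name: linux-system-roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            state: absent
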
TASK [linux-system-roles.storage : Workaround for udev issue on some platforms] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:87 Thursday 01 June 2023 00:41:29 +0000 (0:00:02.431) 0:06:49.947 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Unmask the systemd cryptsetup services] ***** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:99 Thursday 01 June 2023 00:41:29 +0000 (0:00:00.082) 0:06:50.029 ********* TASK [linux-system-roles.storage : Show blivet_output] ************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:105 Thursday 01 June 2023 00:41:29 +0000 (0:00:00.037) 0:06:50.067 ********* ok: [sut] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/foo-test", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/foo-test", "fs_type": null }, { "action": "destroy device", "device": "/dev/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb1", "fs_type": "lvmpv" }, { "action": "destroy device", "device": "/dev/sdb1", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "disklabel" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/zram0" ], "mounts": [], "packages": [ "e2fsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [] } ], "volumes": [] } } TASK [linux-system-roles.storage : Set the list of pools for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:110 Thursday 01 June 2023 00:41:29 +0000 (0:00:00.083) 0:06:50.150 ********* ok: [sut] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [] } ] }, "changed": false } TASK [linux-system-roles.storage : Set the list of volumes for test verification] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:114 Thursday 01 June 2023 00:41:29 +0000 (0:00:00.086) 0:06:50.237 ********* ok: [sut] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [linux-system-roles.storage : Remove obsolete mounts] ********************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:130 Thursday 01 June 2023 00:41:29 
+0000 (0:00:00.134) 0:06:50.372 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:142 Thursday 01 June 2023 00:41:29 +0000 (0:00:00.040) 0:06:50.413 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Set up new/current mounts] ****************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:147 Thursday 01 June 2023 00:41:30 +0000 (0:00:00.080) 0:06:50.493 ********* TASK [linux-system-roles.storage : Manage mount ownership/permissions] ********* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:159 Thursday 01 June 2023 00:41:30 +0000 (0:00:00.040) 0:06:50.533 ********* TASK [linux-system-roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:174 Thursday 01 June 2023 00:41:30 +0000 (0:00:00.039) 0:06:50.573 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [linux-system-roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:182 Thursday 01 June 2023 00:41:30 +0000 (0:00:00.082) 0:06:50.656 ********* ok: [sut] => { "changed": false, "stat": { "atime": 1685580053.4398413, "attr_flags": "e", "attributes": [ "extents" ], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1685580051.2728412, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 263404, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1685580051.2718413, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "621089841", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [linux-system-roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:187 Thursday 01 June 2023 00:41:30 +0000 (0:00:00.273) 0:06:50.929 ********* TASK [linux-system-roles.storage : Update facts] ******************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:209 Thursday 01 June 2023 00:41:30 +0000 (0:00:00.038) 0:06:50.968 ********* ok: [sut] TASK [Verify role results] ***************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/tests_lvm_pool_members.yml:295 Thursday 01 June 2023 00:41:31 +0000 (0:00:00.687) 0:06:51.656 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml for sut TASK [Print out pool information] ********************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:2 Thursday 01 June 2023 00:41:31 +0000 (0:00:00.182) 0:06:51.838 ********* ok: [sut] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "state": "absent", "type": "lvm", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:7 Thursday 01 June 2023 00:41:31 +0000 (0:00:00.085) 0:06:51.924 ********* skipping: [sut] => {} TASK [Collect info about the volumes.] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:15 Thursday 01 June 2023 00:41:31 +0000 (0:00:00.083) 0:06:52.008 ********* ok: [sut] => { "changed": false, "info": { "/dev/sda": { "fstype": "", "label": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "ext4", "label": "", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "f91a7ec7-5021-4d03-b280-c7f5e8053b5f" }, "/dev/zram0": { "fstype": "", "label": "", "name": "/dev/zram0", "size": "3.6G", "type": "disk", "uuid": "" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:20 Thursday 01 June 2023 00:41:31 +0000 (0:00:00.273) 0:06:52.281 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003481", "end": "2023-06-01 00:41:32.080757", "rc": 0, "start": "2023-06-01 00:41:32.077276" } STDOUT: # # /etc/fstab # Created by anaconda on Tue May 16 13:36:23 2023 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=f91a7ec7-5021-4d03-b280-c7f5e8053b5f / ext4 defaults 1 1 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:25 Thursday 01 June 2023 00:41:32 +0000 (0:00:00.268) 0:06:52.549 ********* ok: [sut] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003326", "end": "2023-06-01 00:41:32.349889", "failed_when_result": false, "rc": 0, "start": "2023-06-01 00:41:32.346563" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:34 Thursday 01 June 2023 00:41:32 +0000 (0:00:00.267) 0:06:52.817 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml for sut TASK [Set _storage_pool_tests] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:5 Thursday 01 June 2023 00:41:32 +0000 (0:00:00.117) 0:06:52.935 ********* ok: [sut] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Verify pool subset] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool.yml:18 Thursday 01 June 2023 00:41:32 +0000 (0:00:00.080) 0:06:53.015 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml for sut included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:2 Thursday 01 June 2023 00:41:32 +0000 (0:00:00.156) 0:06:53.172 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_count": "0", "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Get the canonical device path for each member device] ******************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:13 Thursday 01 June 2023 00:41:32 +0000 (0:00:00.087) 0:06:53.260 ********* TASK [Set pvs lvm length] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:22 Thursday 01 June 2023 00:41:32 +0000 (0:00:00.072) 0:06:53.332 ********* ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": "0" }, "changed": false } TASK [Set pool pvs] 
************************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:27 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.085) 0:06:53.418 ********* ok: [sut] => { "ansible_facts": { "_storage_test_pool_pvs": [] }, "changed": false } TASK [Verify PV count] ********************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:33 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.081) 0:06:53.499 ********* ok: [sut] => { "changed": false } MSG: All assertions passed TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:42 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.083) 0:06:53.583 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "disk" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:48 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.081) 0:06:53.664 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_pv_type": "partition" }, "changed": false } TASK [Set expected pv type] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:54 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.085) 0:06:53.750 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:59 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.079) 0:06:53.829 ********* TASK [Check MD RAID] *********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:73 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.037) 0:06:53.867 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml for sut TASK [Get information about RAID] ********************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:8 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.117) 0:06:53.984 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:14 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.080) 0:06:54.065 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:21 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.077) 0:06:54.143 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:28 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.077) 0:06:54.220 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was 
False" } TASK [Check RAID active devices count] ***************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:35 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.110) 0:06:54.330 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:45 Thursday 01 June 2023 00:41:33 +0000 (0:00:00.077) 0:06:54.408 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:55 Thursday 01 June 2023 00:41:34 +0000 (0:00:00.079) 0:06:54.487 ********* skipping: [sut] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-md.yml:66 Thursday 01 June 2023 00:41:34 +0000 (0:00:00.077) 0:06:54.564 ********* ok: [sut] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:76 Thursday 01 June 2023 00:41:34 +0000 (0:00:00.079) 0:06:54.644 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml for sut TASK [Validate pool member LVM RAID settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-lvmraid.yml:2 Thursday 01 June 2023 00:41:34 +0000 (0:00:00.117) 0:06:54.761 ********* TASK [Check Thin Pools] ******************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:79 Thursday 01 June 2023 00:41:34 +0000 (0:00:00.036) 0:06:54.798 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml for sut TASK [Validate pool member thinpool settings] ********************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-thin.yml:2 Thursday 01 June 2023 00:41:34 +0000 (0:00:00.118) 0:06:54.916 ********* TASK [Check member encryption] ************************************************* task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:82 Thursday 01 June 2023 00:41:34 +0000 (0:00:00.037) 0:06:54.954 ********* included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml for sut TASK [Set test variables] ****************************************************** task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:5 Thursday 01 June 2023 00:41:34 +0000 (0:00:00.119) 0:06:55.073 ********* ok: [sut] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: 
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:13
Thursday 01 June 2023 00:41:34 +0000 (0:00:00.081) 0:06:55.155 *********

TASK [Validate pool member crypttab entries] ***********************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:20
Thursday 01 June 2023 00:41:34 +0000 (0:00:00.069) 0:06:55.224 *********

TASK [Clear test variables] ****************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-encryption.yml:27
Thursday 01 June 2023 00:41:34 +0000 (0:00:00.037) 0:06:55.262 *********
ok: [sut] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false }

TASK [Check VDO] ***************************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:85
Thursday 01 June 2023 00:41:34 +0000 (0:00:00.078) 0:06:55.341 *********
included: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml for sut

TASK [Validate pool member VDO settings] ***************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-members-vdo.yml:2
Thursday 01 June 2023 00:41:35 +0000 (0:00:00.118) 0:06:55.460 *********

TASK [Clean up test variables] *************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-members.yml:88
Thursday 01 June 2023 00:41:35 +0000 (0:00:00.037) 0:06:55.497 *********
ok: [sut] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false }

TASK [Verify the volumes] ******************************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/test-verify-pool-volumes.yml:3
Thursday 01 June 2023 00:41:35 +0000 (0:00:00.078) 0:06:55.576 *********

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:44
Thursday 01 June 2023 00:41:35 +0000 (0:00:00.036) 0:06:55.613 *********

TASK [Clean up variable namespace] *********************************************
task path: /WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-role-results.yml:54
Thursday 01 June 2023 00:41:35 +0000 (0:00:00.034) 0:06:55.648 *********
ok: [sut] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false }
META: ran handlers
META: ran handlers

PLAY RECAP *********************************************************************
sut : ok=724 changed=17 unreachable=0 failed=0 skipped=482 rescued=0 ignored=0

Thursday 01 June 2023 00:41:35 +0000 (0:00:00.050) 0:06:55.698 *********
===============================================================================
linux-system-roles.storage : Manage the pools and volumes to match the specified state -- 88.74s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
linux-system-roles.storage : Manage the pools and volumes to match the specified state -- 78.87s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
linux-system-roles.storage : Manage the pools and volumes to match the specified state -- 11.50s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 9.27s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 8.05s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
linux-system-roles.storage : Make sure blivet is available -------------- 7.81s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:2
linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 7.10s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 3.64s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
Ensure cryptsetup is present -------------------------------------------- 3.63s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12
linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 3.62s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 3.51s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
Ensure cryptsetup is present -------------------------------------------- 3.18s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12
linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 2.92s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 2.66s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
Ensure cryptsetup is present -------------------------------------------- 2.46s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12
Ensure cryptsetup is present -------------------------------------------- 2.46s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12
Ensure cryptsetup is present -------------------------------------------- 2.45s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12
Ensure cryptsetup is present -------------------------------------------- 2.45s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12
Ensure cryptsetup is present -------------------------------------------- 2.45s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/verify-pool-member-encryption.yml:12
linux-system-roles.storage : Manage the pools and volumes to match the specified state --- 2.43s
/WORKDIR/git-main_lvm-raid-stripe-sizeq7sss_ht/tests/roles/linux-system-roles.storage/tasks/main-blivet.yml:73
---^---^---^---^---^---

# STDERR: ---v---v---v---v---v---
/opt/ansible-2.9/lib/python3.6/site-packages/ansible/parsing/vault/__init__.py:44: CryptographyDeprecationWarning: Python 3.6 is no longer supported by the Python core team. Therefore, support for it is deprecated in cryptography. The next release of cryptography will remove support for Python 3.6.
  from cryptography.exceptions import InvalidSignature
[DEPRECATION WARNING]: Distribution fedora 36 on host sut should use /usr/bin/python3, but is using /usr/bin/python for backward compatibility with prior Ansible releases. A future Ansible release will default to using the discovered platform python for this host. See https://docs.ansible.com/ansible/2.9/reference_appendices/interpreter_discovery.html for more information. This feature will be removed in version 2.12. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.
---^---^---^---^---^---
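
The identity check this play repeats after every member change ("Get UUID of the 'foo' volume group" followed by "Make sure the VG UUID didn't change") is the core assertion of the test: adding or removing PVs must modify the existing VG in place, never delete and recreate it. In playbook form it amounts to the pattern below; the vgs invocation is copied from the log output, while the task layout and the vg_uuid_before/vg_uuid_after register names are illustrative:

    # Sketch of the UUID-stability check seen in the log; vg_uuid_before is
    # assumed to have been registered the same way before the role ran.
    - name: Get UUID of the 'foo' volume group
      command: vgs --noheading -o vg_uuid foo
      register: vg_uuid_after
      changed_when: false

    - name: Make sure the VG UUID didn't change (VG wasn't removed)
      assert:
        that:
          - vg_uuid_after.stdout | trim == vg_uuid_before.stdout | trim

Because LVM assigns the VG UUID once at creation time and keeps it stable across vgextend/vgreduce, an unchanged UUID is sufficient evidence that the volume group survived the member change.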