ansible-playbook 2.9.27
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.9.19 (main, May 16 2024, 11:40:09) [GCC 8.5.0 20210514 (Red Hat 8.5.0-22)]
No config file found; using defaults
[WARNING]: running playbook inside collection fedora.linux_system_roles
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.

PLAYBOOK: tests_change_mount.yml ***********************************************
1 plays in /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml

PLAY [Test change mount] *******************************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:2
Saturday 17 August 2024  18:25:45 -0400 (0:00:00.019)       0:00:00.019 ******* 
ok: [managed_node3]
META: ran handlers

TASK [Run the role] ************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:13
Saturday 17 August 2024  18:25:46 -0400 (0:00:01.109)       0:00:01.129 ******* 

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 17 August 2024  18:25:46 -0400 (0:00:00.041)       0:00:01.171 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 17 August 2024  18:25:46 -0400 (0:00:00.044)       0:00:01.216 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 17 August 2024  18:25:46 -0400 (0:00:00.053)       0:00:01.269 ******* 
skipping: [managed_node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed_node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed_node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 17 August 2024  18:25:47 -0400 (0:00:00.101)       0:00:01.371 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 17 August 2024  18:25:47 -0400 (0:00:00.478)       0:00:01.850 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "__storage_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 17 August 2024  18:25:47 -0400 (0:00:00.040)       0:00:01.890 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 17 August 2024  18:25:47 -0400 (0:00:00.014)       0:00:01.905 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 17 August 2024  18:25:47 -0400 (0:00:00.015)       0:00:01.921 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 17 August 2024  18:25:47 -0400 (0:00:00.087)       0:00:02.008 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kmod-kvdo libblockdev libblockdev-crypto libblockdev-dm libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo xfsprogs

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 17 August 2024  18:25:50 -0400 (0:00:02.923)       0:00:04.932 ******* 
ok: [managed_node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 17 August 2024  18:25:50 -0400 (0:00:00.064)       0:00:04.997 ******* 
ok: [managed_node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 17 August 2024  18:25:50 -0400 (0:00:00.076)       0:00:05.073 ******* 
ok: [managed_node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 17 August 2024  18:25:51 -0400 (0:00:00.711)       0:00:05.784 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Saturday 17 August 2024  18:25:51 -0400 (0:00:00.077)       0:00:05.862 ******* 

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Saturday 17 August 2024  18:25:51 -0400 (0:00:00.022)       0:00:05.885 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Saturday 17 August 2024  18:25:51 -0400 (0:00:00.025)       0:00:05.910 ******* 

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 17 August 2024  18:25:51 -0400 (0:00:00.018)       0:00:05.928 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kpartx

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 17 August 2024  18:25:54 -0400 (0:00:02.822)       0:00:08.750 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "chrony-dnssrv@.service": {
                "name": "chrony-dnssrv@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "cpupower.service": {
                "name": "cpupower.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "static"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "dbus-org.freedesktop.portable1.service": {
                "name": "dbus-org.freedesktop.portable1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "unknown"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "halt-local.service": {
                "name": "halt-local.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "import-state.service": {
                "name": "import-state.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "iprdump.service": {
                "name": "iprdump.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "iprinit.service": {
                "name": "iprinit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "iprupdate.service": {
                "name": "iprupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "loadmodules.service": {
                "name": "loadmodules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "lvm2-pvscan@.service": {
                "name": "lvm2-pvscan@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "lvm2-pvscan@8:0.service": {
                "name": "lvm2-pvscan@8:0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "messagebus.service": {
                "name": "messagebus.service",
                "source": "systemd",
                "state": "active",
                "status": "static"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "ndctl-monitor.service": {
                "name": "ndctl-monitor.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-convert.service": {
                "name": "nfs-convert.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "oddjobd.service": {
                "name": "oddjobd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "plymouth-halt.service": {
                "name": "plymouth-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-kexec.service": {
                "name": "plymouth-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-poweroff.service": {
                "name": "plymouth-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-quit.service": {
                "name": "plymouth-quit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-read-write.service": {
                "name": "plymouth-read-write.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-reboot.service": {
                "name": "plymouth-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-switch-root-initramfs.service": {
                "name": "plymouth-switch-root-initramfs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-switch-root.service": {
                "name": "plymouth-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon.service": {
                "name": "quotaon.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rdisc.service": {
                "name": "rdisc.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "active",
                "status": "enabled"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-plymouth.service": {
                "name": "systemd-ask-password-plymouth.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-resume@.service": {
                "name": "systemd-hibernate-resume@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-portabled.service": {
                "name": "systemd-portabled.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck.service": {
                "name": "systemd-quotacheck.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-resolved.service": {
                "name": "systemd-resolved.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "masked"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "tcsd.service": {
                "name": "tcsd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "teamd@.service": {
                "name": "teamd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "timedatex.service": {
                "name": "timedatex.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "tuned.service": {
                "name": "tuned.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "unbound-anchor.service": {
                "name": "unbound-anchor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "unknown"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "unknown"
            },
            "vdo-start-by-dev@.service": {
                "name": "vdo-start-by-dev@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "vdo.service": {
                "name": "vdo.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 17 August 2024  18:25:56 -0400 (0:00:01.749)       0:00:10.500 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 17 August 2024  18:25:56 -0400 (0:00:00.048)       0:00:10.549 ******* 

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 17 August 2024  18:25:56 -0400 (0:00:00.033)       0:00:10.582 ******* 
ok: [managed_node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 17 August 2024  18:25:56 -0400 (0:00:00.663)       0:00:11.246 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 17 August 2024  18:25:56 -0400 (0:00:00.036)       0:00:11.283 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933508.793445,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "ab8070345774adad92683e9645714452be7be474",
        "ctime": 1723933508.3984394,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 316670089,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1723933508.3984394,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1343,
        "uid": 0,
        "version": "1783227297",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.507)       0:00:11.790 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.033)       0:00:11.824 ******* 

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.029)       0:00:11.853 ******* 
ok: [managed_node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.039)       0:00:11.892 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.039)       0:00:11.932 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.047)       0:00:11.979 ******* 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.080)       0:00:12.059 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.063)       0:00:12.122 ******* 

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.030)       0:00:12.153 ******* 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.036)       0:00:12.189 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 17 August 2024  18:25:57 -0400 (0:00:00.039)       0:00:12.229 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933211.7108438,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1716968941.893,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 135,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1716968586.525,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1157759751",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 17 August 2024  18:25:58 -0400 (0:00:00.370)       0:00:12.599 ******* 

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 17 August 2024  18:25:58 -0400 (0:00:00.018)       0:00:12.618 ******* 
ok: [managed_node3]

TASK [Mark tasks to be skipped] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:17
Saturday 17 August 2024  18:25:59 -0400 (0:00:00.734)       0:00:13.352 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "packages_installed",
            "service_facts"
        ]
    },
    "changed": false
}
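
For reference, a minimal sketch of how a test play can pre-set these skip flags before invoking the role (the fact name and values are taken from the output above; the exact wording of the task in tests_change_mount.yml may differ):

    - name: Mark tasks to be skipped
      set_fact:
        storage_skip_checks:
          - blivet_available
          - packages_installed
          - service_facts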

TASK [Get unused disks] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:24
Saturday 17 August 2024  18:25:59 -0400 (0:00:00.025)       0:00:13.377 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed_node3

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
Saturday 17 August 2024  18:25:59 -0400 (0:00:00.029)       0:00:13.407 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: util-linux

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
Saturday 17 August 2024  18:26:01 -0400 (0:00:02.851)       0:00:16.259 ******* 
ok: [managed_node3] => {
    "changed": false,
    "disks": [
        "sda"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}
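
The "Line:" entries above come from the collection's own unused-disk helper. As a rough, illustrative sketch only (these lsblk options are an assumption, not the helper's actual implementation), the same raw data could be gathered with:

    - name: List block devices with type, size, filesystem and logical sector size
      command: lsblk -b -l --noheadings -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
      register: lsblk_out
      changed_when: false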

TASK [Debug why there are no unused disks] *************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20
Saturday 17 August 2024  18:26:02 -0400 (0:00:00.636)       0:00:16.895 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29
Saturday 17 August 2024  18:26:02 -0400 (0:00:00.032)       0:00:16.927 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "unused_disks": [
            "sda"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there are not enough unused disks in the system] ******

task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34
Saturday 17 August 2024  18:26:02 -0400 (0:00:00.044)       0:00:16.972 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39
Saturday 17 August 2024  18:26:02 -0400 (0:00:00.029)       0:00:17.001 ******* 
ok: [managed_node3] => {
    "unused_disks": [
        "sda"
    ]
}

TASK [Create a LVM logical volume mounted at /opt/test1] ***********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:30
Saturday 17 August 2024  18:26:02 -0400 (0:00:00.035)       0:00:17.037 ******* 

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 17 August 2024  18:26:02 -0400 (0:00:00.071)       0:00:17.109 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 17 August 2024  18:26:02 -0400 (0:00:00.042)       0:00:17.151 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 17 August 2024  18:26:02 -0400 (0:00:00.064)       0:00:17.216 ******* 
skipping: [managed_node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed_node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed_node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed_node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
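
Note that the last package entry above is an unevaluated Jinja2 conditional: it is stored verbatim in the vars file and only rendered when the list is consumed. A sketch of how such an architecture-dependent entry can be declared in a vars file (illustrative; the real CentOS_8.yml contains additional entries):

    blivet_package_list:
      - python3-blivet
      - xfsprogs
      - "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"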

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 17 August 2024  18:26:02 -0400 (0:00:00.093)       0:00:17.309 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.038)       0:00:17.347 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.036)       0:00:17.384 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.030)       0:00:17.415 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.032)       0:00:17.447 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.078)       0:00:17.525 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.033)       0:00:17.558 ******* 
ok: [managed_node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "3g"
                }
            ]
        }
    ]
}
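
The storage_pools value shown here is the role's main input. A minimal sketch of a play that would pass the same specification to the role (the structure mirrors the output above; include_role as the entry point is an assumption):

    - hosts: all
      tasks:
        - name: Create a LVM logical volume mounted at /opt/test1
          include_role:
            name: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                disks:
                  - sda
                volumes:
                  - name: test1
                    size: 3g
                    mount_point: /opt/test1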

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.036)       0:00:17.595 ******* 
ok: [managed_node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.030)       0:00:17.626 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.022)       0:00:17.648 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.026)       0:00:17.674 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.041)       0:00:17.716 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.066)       0:00:17.783 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.052)       0:00:17.836 ******* 

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 17 August 2024  18:26:03 -0400 (0:00:00.025)       0:00:17.861 ******* 
changed: [managed_node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "mounted"
        }
    ],
    "packages": [
        "lvm2",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "3g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 17 August 2024  18:26:08 -0400 (0:00:04.490)       0:00:22.351 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 17 August 2024  18:26:08 -0400 (0:00:00.020)       0:00:22.372 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933508.793445,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "ab8070345774adad92683e9645714452be7be474",
        "ctime": 1723933508.3984394,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 316670089,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1723933508.3984394,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1343,
        "uid": 0,
        "version": "1783227297",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 17 August 2024  18:26:08 -0400 (0:00:00.361)       0:00:22.734 ******* 
ok: [managed_node3] => {
    "backup": "",
    "changed": false
}
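
This is the step that stamps the "# system_role:storage" fingerprint that later appears at the top of /etc/fstab. A hedged sketch, assuming a lineinfile-style task (the role's actual implementation may differ):

    - name: Add fingerprint to /etc/fstab if present
      lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"
        insertbefore: BOF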

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 17 August 2024  18:26:08 -0400 (0:00:00.485)       0:00:23.219 ******* 

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 17 August 2024  18:26:08 -0400 (0:00:00.020)       0:00:23.240 ******* 
ok: [managed_node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "mounted"
            }
        ],
        "packages": [
            "lvm2",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "3g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 17 August 2024  18:26:08 -0400 (0:00:00.024)       0:00:23.264 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "3g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 17 August 2024  18:26:08 -0400 (0:00:00.023)       0:00:23.288 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 17 August 2024  18:26:08 -0400 (0:00:00.022)       0:00:23.311 ******* 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 17 August 2024  18:26:09 -0400 (0:00:00.021)       0:00:23.333 ******* 
ok: [managed_node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
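
A sketch of this daemon-reload step, assuming the systemd module's daemon_reload option is what produces the "name": null / "status": {} result above:

    - name: Tell systemd to refresh its view of /etc/fstab
      systemd:
        daemon_reload: true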

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 17 August 2024  18:26:09 -0400 (0:00:00.931)       0:00:24.264 ******* 
changed: [managed_node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
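
Each mount_info entry maps directly onto a mount-module call. A sketch of the equivalent standalone task (field values copied from the entry above):

    - name: Set up the new mount
      mount:
        src: /dev/mapper/foo-test1
        path: /opt/test1
        fstype: xfs
        opts: defaults
        state: mounted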

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 17 August 2024  18:26:10 -0400 (0:00:00.501)       0:00:24.765 ******* 
skipping: [managed_node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 17 August 2024  18:26:10 -0400 (0:00:00.032)       0:00:24.798 ******* 
ok: [managed_node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 17 August 2024  18:26:11 -0400 (0:00:00.598)       0:00:25.397 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933211.7108438,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1716968941.893,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 135,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1716968586.525,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1157759751",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 17 August 2024  18:26:11 -0400 (0:00:00.395)       0:00:25.793 ******* 

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 17 August 2024  18:26:11 -0400 (0:00:00.024)       0:00:25.817 ******* 
ok: [managed_node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:42
Saturday 17 August 2024  18:26:12 -0400 (0:00:00.720)       0:00:26.538 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed_node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 17 August 2024  18:26:12 -0400 (0:00:00.035)       0:00:26.574 ******* 
ok: [managed_node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "3g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 17 August 2024  18:26:12 -0400 (0:00:00.086)       0:00:26.660 ******* 
skipping: [managed_node3] => {}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 17 August 2024  18:26:12 -0400 (0:00:00.027)       0:00:26.688 ******* 
ok: [managed_node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "3G",
            "type": "lvm",
            "uuid": "fae1f88e-f90d-4aeb-9c44-8a9fc134e1da"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "tmL8Zr-ptcF-EfsK-KB6c-NeVU-jo8v-UeqVze"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "fe591198-9082-4b15-9b62-e83518524cd2"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 17 August 2024  18:26:12 -0400 (0:00:00.512)       0:00:27.200 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002713",
    "end": "2024-08-17 18:26:13.253079",
    "rc": 0,
    "start": "2024-08-17 18:26:13.250366"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed May 29 07:43:06 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=fe591198-9082-4b15-9b62-e83518524cd2 /                       xfs     defaults        0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
/dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0
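
The last line is the entry the role just added for the new volume. A sketch of the kind of check the verification step performs on it (the register name fstab_contents is hypothetical):

    - name: Read the /etc/fstab file for volume existence
      command: cat /etc/fstab
      register: fstab_contents
      changed_when: false

    - name: Assert the new mount appears exactly once
      assert:
        that:
          - fstab_contents.stdout_lines | select('search', '/opt/test1') | list | length == 1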

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 17 August 2024  18:26:13 -0400 (0:00:00.443)       0:00:27.643 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002424",
    "end": "2024-08-17 18:26:13.603369",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-08-17 18:26:13.600945"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 17 August 2024  18:26:13 -0400 (0:00:00.347)       0:00:27.991 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed_node3

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 17 August 2024  18:26:13 -0400 (0:00:00.057)       0:00:28.049 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 17 August 2024  18:26:13 -0400 (0:00:00.021)       0:00:28.071 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.022995",
    "end": "2024-08-17 18:26:14.056428",
    "rc": 0,
    "start": "2024-08-17 18:26:14.033433"
}

STDOUT:

        0
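
The "0" above means the foo volume group is not marked shared. A sketch of the command-plus-assertion pair used for this check (vg_shared is a hypothetical register name):

    - name: Get VG shared value status
      command: vgs --noheadings --binary -o shared foo
      register: vg_shared
      changed_when: false

    - name: Verify that VG shared value checks out
      assert:
        that:
          - vg_shared.stdout | trim == '0'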

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 17 August 2024  18:26:14 -0400 (0:00:00.366)       0:00:28.437 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 17 August 2024  18:26:14 -0400 (0:00:00.034)       0:00:28.472 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed_node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 17 August 2024  18:26:14 -0400 (0:00:00.057)       0:00:28.529 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 17 August 2024  18:26:14 -0400 (0:00:00.036)       0:00:28.566 ******* 
ok: [managed_node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 17 August 2024  18:26:14 -0400 (0:00:00.576)       0:00:29.142 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 17 August 2024  18:26:14 -0400 (0:00:00.042)       0:00:29.185 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 17 August 2024  18:26:14 -0400 (0:00:00.043)       0:00:29.229 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 17 August 2024  18:26:14 -0400 (0:00:00.042)       0:00:29.271 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 17 August 2024  18:26:14 -0400 (0:00:00.030)       0:00:29.302 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.029)       0:00:29.331 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.022)       0:00:29.353 ******* 
ok: [managed_node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.055)       0:00:29.409 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0
}

STDOUT:

False



STDERR:

Shared connection to 10.31.45.60 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.414)       0:00:29.824 ******* 
skipping: [managed_node3] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.033)       0:00:29.858 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed_node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.041)       0:00:29.899 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.020)       0:00:29.919 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.020)       0:00:29.940 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.019)       0:00:29.960 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.019)       0:00:29.980 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.020)       0:00:30.000 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.027)       0:00:30.027 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.027)       0:00:30.055 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.040)       0:00:30.095 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.101)       0:00:30.197 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.033)       0:00:30.230 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 17 August 2024  18:26:15 -0400 (0:00:00.083)       0:00:30.313 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed_node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.086)       0:00:30.400 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed_node3

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.093)       0:00:30.494 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.044)       0:00:30.538 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.031)       0:00:30.569 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.070)       0:00:30.640 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.050)       0:00:30.690 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.035)       0:00:30.726 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.053)       0:00:30.779 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.031)       0:00:30.810 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed_node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.086)       0:00:30.897 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed_node3

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.067)       0:00:30.965 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.033)       0:00:30.999 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.057)       0:00:31.056 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.037)       0:00:31.093 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.034)       0:00:31.128 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed_node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.087)       0:00:31.216 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.036)       0:00:31.253 ******* 
skipping: [managed_node3] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 17 August 2024  18:26:16 -0400 (0:00:00.038)       0:00:31.291 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed_node3

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.063)       0:00:31.355 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.037)       0:00:31.393 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.036)       0:00:31.430 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.033)       0:00:31.463 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.035)       0:00:31.498 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.035)       0:00:31.533 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.032)       0:00:31.566 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.046)       0:00:31.613 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed_node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.090)       0:00:31.704 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed_node3

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.066)       0:00:31.770 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.051)       0:00:31.821 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.049)       0:00:31.870 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.070)       0:00:31.941 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.052)       0:00:31.994 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.062)       0:00:32.057 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.047)       0:00:32.104 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.030)       0:00:32.134 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed_node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.079)       0:00:32.213 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.056)       0:00:32.270 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 17 August 2024  18:26:17 -0400 (0:00:00.040)       0:00:32.310 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.063)       0:00:32.373 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.039)       0:00:32.412 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.039)       0:00:32.452 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.072)       0:00:32.524 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.076)       0:00:32.601 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed_node3

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.107)       0:00:32.708 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.067)       0:00:32.775 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed_node3
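
The eight includes above are driven by the _storage_volume_tests list set in "Set storage volume test variables": each entry selects one test-verify-volume-<subset>.yml file, and the loop variable name is visible in the task title ("Run test verify for {{ storage_test_volume_subset }}"). A minimal sketch of that include loop, assuming include_tasks is what drives it:

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset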

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.333)       0:00:33.109 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.067)       0:00:33.176 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.076)       0:00:33.253 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 17 August 2024  18:26:18 -0400 (0:00:00.037)       0:00:33.290 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed
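
The assertion above only has the gathered mount facts and the two expectations set in "Set some facts" to work with: the device path and the expected mount point. A sketch of how such a check can be expressed (the exact expression in the test file may differ):

    - name: Verify the current mount state by device
      ansible.builtin.assert:
        that:
          # exactly one mount entry for /dev/mapper/foo-test1 on /opt/test1
          - >-
            ansible_facts['mounts']
            | selectattr('device', 'equalto', storage_test_device_path)
            | selectattr('mount', 'equalto', storage_test_mount_expected_mount_point)
            | list | length == 1
        msg: "{{ storage_test_device_path }} is not mounted on {{ storage_test_mount_expected_mount_point }}"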

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.055)       0:00:33.346 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.051)       0:00:33.397 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.073)       0:00:33.471 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.068)       0:00:33.539 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.068)       0:00:33.607 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.038)       0:00:33.646 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.056)       0:00:33.703 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.069)       0:00:33.772 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "/dev/mapper/foo-test1 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
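
The lists above are regex matches pulled out of /etc/fstab for this volume's device, mount point, and mount options; the assertions that follow only compare the number of matches against the expected counts ("1" each here). A minimal sketch of how those facts can be built, assuming the fstab content is read with slurp (the register name and anything beyond the storage_test_fstab_* variables are illustrative):

    - name: Read /etc/fstab
      ansible.builtin.slurp:
        src: /etc/fstab
      register: __fstab

    - name: Set some variables for fstab checking
      ansible.builtin.set_fact:
        storage_test_fstab_expected_id_matches: "1"
        storage_test_fstab_id_matches: >-
          {{ __fstab.content | b64decode
             | regex_findall('^' ~ storage_test_device_path ~ ' ', multiline=True) }}

    - name: Verify that the device identifier appears in /etc/fstab
      ansible.builtin.assert:
        that:
          - storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int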

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.070)       0:00:33.842 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.094)       0:00:33.936 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.069)       0:00:34.006 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.031)       0:00:34.037 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.074)       0:00:34.112 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.050)       0:00:34.163 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.070)       0:00:34.234 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 17 August 2024  18:26:19 -0400 (0:00:00.086)       0:00:34.320 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933567.926009,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1723933567.926009,
        "dev": 6,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 124319,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1723933567.926009,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 17 August 2024  18:26:20 -0400 (0:00:00.614)       0:00:34.935 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed
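
When the volume is expected to exist (as here), the stat result gathered two tasks earlier is checked for presence and for being a block device; the second, skipped variant of the task covers the "volume absent" case. A sketch under those assumptions (the register name is illustrative):

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: /dev/mapper/foo-test1
        follow: true
      register: storage_test_dev

    - name: Verify the presence/absence of the device node
      ansible.builtin.assert:
        that:
          - storage_test_dev.stat.exists
          - storage_test_dev.stat.isblk
        msg: "Expected /dev/mapper/foo-test1 to be present as a block device"
      when: _storage_test_volume_present | bool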

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 17 August 2024  18:26:20 -0400 (0:00:00.051)       0:00:34.986 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 17 August 2024  18:26:20 -0400 (0:00:00.053)       0:00:35.039 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 17 August 2024  18:26:20 -0400 (0:00:00.037)       0:00:35.077 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 17 August 2024  18:26:20 -0400 (0:00:00.040)       0:00:35.117 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 17 August 2024  18:26:20 -0400 (0:00:00.039)       0:00:35.157 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 17 August 2024  18:26:20 -0400 (0:00:00.040)       0:00:35.197 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 17 August 2024  18:26:20 -0400 (0:00:00.031)       0:00:35.229 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 17 August 2024  18:26:23 -0400 (0:00:03.038)       0:00:38.267 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 17 August 2024  18:26:23 -0400 (0:00:00.021)       0:00:38.289 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 17 August 2024  18:26:23 -0400 (0:00:00.021)       0:00:38.311 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.029)       0:00:38.340 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.020)       0:00:38.361 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.020)       0:00:38.382 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.021)       0:00:38.404 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.021)       0:00:38.425 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.023)       0:00:38.449 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.027)       0:00:38.476 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.025)       0:00:38.502 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.021)       0:00:38.523 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.021)       0:00:38.544 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.022)       0:00:38.566 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.023)       0:00:38.589 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.027)       0:00:38.617 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.033)       0:00:38.650 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.031)       0:00:38.681 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.031)       0:00:38.712 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.032)       0:00:38.745 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.033)       0:00:38.778 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.035)       0:00:38.814 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.032)       0:00:38.846 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.032)       0:00:38.879 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 17 August 2024  18:26:24 -0400 (0:00:00.032)       0:00:38.911 ******* 
ok: [managed_node3] => {
    "bytes": 3221225472,
    "changed": false,
    "lvm": "3g",
    "parted": "3GiB",
    "size": "3 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 17 August 2024  18:26:25 -0400 (0:00:00.700)       0:00:39.612 ******* 
ok: [managed_node3] => {
    "bytes": 3221225472,
    "changed": false,
    "lvm": "3g",
    "parted": "3GiB",
    "size": "3 GiB"
}
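
Both parse steps above turn the human-readable size into bytes: 3 GiB = 3 * 1024^3 = 3221225472, which then becomes storage_test_expected_size and is later compared against the actual LV size. In plain Ansible the same conversion and comparison could be sketched with the human_to_bytes filter (variable names other than storage_test_expected_size and storage_test_actual_size are illustrative):

    - name: Parse the requested size of the volume
      ansible.builtin.set_fact:
        storage_test_requested_size: "{{ '3G' | human_to_bytes }}"   # 3221225472

    - name: Assert expected size is actual size
      ansible.builtin.assert:
        that:
          - (storage_test_actual_size.bytes | int) == (storage_test_expected_size | int)
        msg: "actual size does not match the requested size"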

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 17 August 2024  18:26:25 -0400 (0:00:00.521)       0:00:40.133 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_expected_size": "3221225472"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 17 August 2024  18:26:25 -0400 (0:00:00.048)       0:00:40.182 ******* 
ok: [managed_node3] => {
    "storage_test_expected_size": "3221225472"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 17 August 2024  18:26:25 -0400 (0:00:00.034)       0:00:40.217 ******* 
ok: [managed_node3] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.456)       0:00:40.673 ******* 
skipping: [managed_node3] => {}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.037)       0:00:40.711 ******* 
skipping: [managed_node3] => {}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.036)       0:00:40.747 ******* 
skipping: [managed_node3] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.045)       0:00:40.792 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.037)       0:00:40.830 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.043)       0:00:40.874 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.043)       0:00:40.917 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.034)       0:00:40.952 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.031)       0:00:40.983 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.032)       0:00:41.015 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.043)       0:00:41.059 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.038)       0:00:41.097 ******* 
skipping: [managed_node3] => {}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.031)       0:00:41.129 ******* 
skipping: [managed_node3] => {}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.030)       0:00:41.160 ******* 
skipping: [managed_node3] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.046)       0:00:41.206 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.032)       0:00:41.238 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.030)       0:00:41.269 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.024)       0:00:41.294 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 17 August 2024  18:26:26 -0400 (0:00:00.021)       0:00:41.315 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.020)       0:00:41.336 ******* 
ok: [managed_node3] => {
    "storage_test_actual_size": {
        "bytes": 3221225472,
        "changed": false,
        "failed": false,
        "lvm": "3g",
        "parted": "3GiB",
        "size": "3 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.021)       0:00:41.357 ******* 
ok: [managed_node3] => {
    "storage_test_expected_size": "3221225472"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.022)       0:00:41.380 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed
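
The assertion above compares the expected size string against the measured size in bytes (both 3221225472 here). A minimal standalone sketch of such a check, assuming only the two variables printed by the preceding debug tasks (the real test may additionally allow a small rounding tolerance):

    - name: Assert expected size is actual size (sketch)
      assert:
        that:
          # both values appear in the debug output above; the expected value is
          # a string, so cast it before comparing
          - storage_test_expected_size | int == storage_test_actual_size.bytes
        msg: >-
          expected {{ storage_test_expected_size }} bytes,
          got {{ storage_test_actual_size.bytes }} bytes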

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.029)       0:00:41.409 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.024158",
    "end": "2024-08-17 18:26:27.431250",
    "rc": 0,
    "start": "2024-08-17 18:26:27.407092"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
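
Because of the --noheadings --nameprefixes --unquoted flags, lvs prints a single line of shell-style KEY=VALUE pairs, which is why the next task can pull the segment type out with a simple regex. A self-contained sketch of that pattern (the task names and the lv_info register are illustrative, not taken from this test):

    - name: Get information about the LV (sketch)
      command: >
        lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted
        -o name,attr,cache_total_blocks,chunk_size,segtype foo/test1
      register: lv_info
      changed_when: false

    - name: Set LV segment type (sketch)
      set_fact:
        # regex_search with a capture group returns a list, which matches the
        # ["linear"] value set by the "Set LV segment type" task below
        storage_test_lv_segtype: "{{ lv_info.stdout | regex_search('LVM2_SEGTYPE=(\\S+)', '\\1') }}"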

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.415)       0:00:41.824 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.035)       0:00:41.860 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.039)       0:00:41.899 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.033)       0:00:41.933 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.034)       0:00:41.967 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.033)       0:00:42.001 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.033)       0:00:42.034 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.032)       0:00:42.066 ******* 

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.048)       0:00:42.115 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Change the mount location to /opt/test2] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:45
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.031)       0:00:42.146 ******* 

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.071)       0:00:42.217 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.052)       0:00:42.270 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 17 August 2024  18:26:27 -0400 (0:00:00.043)       0:00:42.313 ******* 
skipping: [managed_node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed_node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed_node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed_node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
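
The same CentOS_8.yml item is processed twice above, most likely because the role derives its candidate vars-file names from both the distribution major version and the full distribution version, and on this host both render to CentOS_8.yml; the RedHat.yml and CentOS.yml candidates are skipped by the task's when: condition. A hedged sketch of that lookup pattern (not a verbatim copy of set_vars.yml):

    - name: Set platform/version specific variables (sketch)
      include_vars: "{{ item }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
      # the real task also guards each item with a when: test, which is why the
      # first two candidates show "Conditional result was False" above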

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.092)       0:00:42.406 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.035)       0:00:42.441 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.035)       0:00:42.477 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.039)       0:00:42.516 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.029)       0:00:42.546 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.076)       0:00:42.623 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.032)       0:00:42.655 ******* 
ok: [managed_node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "volumes": [
                {
                    "mount_point": "/opt/test2",
                    "name": "test1",
                    "size": "3g"
                }
            ]
        }
    ]
}
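
This is the pool specification the test play hands to the role for the mount-point change; expressed as a role invocation it is roughly equivalent to the following sketch (not copied verbatim from tests_change_mount.yml):

    - name: Change the mount location to /opt/test2 (sketch)
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks:
              - sda
            volumes:
              - name: test1
                size: 3g
                mount_point: /opt/test2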

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.038)       0:00:42.693 ******* 
ok: [managed_node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.046)       0:00:42.740 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.076)       0:00:42.816 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.072)       0:00:42.888 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.047)       0:00:42.936 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.031)       0:00:42.967 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.049)       0:00:43.016 ******* 

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 17 August 2024  18:26:28 -0400 (0:00:00.031)       0:00:43.048 ******* 
ok: [managed_node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/mapper/foo-test1",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "path": "/opt/test1",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test2",
            "src": "/dev/mapper/foo-test1",
            "state": "mounted"
        }
    ],
    "packages": [
        "lvm2",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "3g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 17 August 2024  18:26:33 -0400 (0:00:04.521)       0:00:47.569 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 17 August 2024  18:26:33 -0400 (0:00:00.019)       0:00:47.589 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933570.3640273,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "a1522684f5b6a445a50f2611a4e0757a4aec1cf1",
        "ctime": 1723933570.3610272,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 316670089,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1723933570.3610272,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1393,
        "uid": 0,
        "version": "1783227297",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 17 August 2024  18:26:33 -0400 (0:00:00.342)       0:00:47.932 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 17 August 2024  18:26:33 -0400 (0:00:00.022)       0:00:47.954 ******* 

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 17 August 2024  18:26:33 -0400 (0:00:00.018)       0:00:47.972 ******* 
ok: [managed_node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/mapper/foo-test1",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "path": "/opt/test1",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test2",
                "src": "/dev/mapper/foo-test1",
                "state": "mounted"
            }
        ],
        "packages": [
            "lvm2",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "3g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 17 August 2024  18:26:33 -0400 (0:00:00.057)       0:00:48.030 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "3g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 17 August 2024  18:26:33 -0400 (0:00:00.025)       0:00:48.055 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 17 August 2024  18:26:33 -0400 (0:00:00.023)       0:00:48.079 ******* 
changed: [managed_node3] => (item={'path': '/opt/test1', 'state': 'absent'}) => {
    "ansible_loop_var": "mount_info",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "mount_info": {
        "path": "/opt/test1",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 17 August 2024  18:26:34 -0400 (0:00:00.378)       0:00:48.457 ******* 
ok: [managed_node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
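
The empty name/status in this result is what the systemd module reports when it is asked only to reload its unit files rather than act on a named unit; a minimal sketch of what this step most likely amounts to:

    - name: Tell systemd to refresh its view of /etc/fstab (sketch)
      systemd:
        daemon_reload: true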

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 17 August 2024  18:26:34 -0400 (0:00:00.605)       0:00:49.063 ******* 
changed: [managed_node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
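
Each dictionary in blivet_output.mounts is applied by looping a mount task over the list, which is where the /opt/test2 entry above gets written to /etc/fstab and mounted. A simplified sketch of that step, assuming the core mount module (newer Ansible ships it as ansible.posix.mount) and the mount_info loop variable seen in the output:

    - name: Set up new/current mounts (sketch)
      mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        opts: "{{ mount_info['opts'] }}"
        state: "{{ mount_info['state'] }}"
      # only apply the entries that should end up mounted; absent entries are
      # handled by the earlier "Remove obsolete mounts" step
      loop: "{{ blivet_output.mounts | selectattr('state', 'equalto', 'mounted') | list }}"
      loop_control:
        loop_var: mount_info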

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 17 August 2024  18:26:35 -0400 (0:00:00.403)       0:00:49.466 ******* 
skipping: [managed_node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 17 August 2024  18:26:35 -0400 (0:00:00.042)       0:00:49.508 ******* 
ok: [managed_node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 17 August 2024  18:26:35 -0400 (0:00:00.594)       0:00:50.102 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933211.7108438,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1716968941.893,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 135,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1716968586.525,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1157759751",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 17 August 2024  18:26:36 -0400 (0:00:00.438)       0:00:50.541 ******* 

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 17 August 2024  18:26:36 -0400 (0:00:00.032)       0:00:50.574 ******* 
ok: [managed_node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:57
Saturday 17 August 2024  18:26:36 -0400 (0:00:00.737)       0:00:51.311 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed_node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 17 August 2024  18:26:37 -0400 (0:00:00.040)       0:00:51.352 ******* 
ok: [managed_node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "3g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 17 August 2024  18:26:37 -0400 (0:00:00.027)       0:00:51.380 ******* 
skipping: [managed_node3] => {}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 17 August 2024  18:26:37 -0400 (0:00:00.021)       0:00:51.401 ******* 
ok: [managed_node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test2",
            "name": "/dev/mapper/foo-test1",
            "size": "3G",
            "type": "lvm",
            "uuid": "fae1f88e-f90d-4aeb-9c44-8a9fc134e1da"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "tmL8Zr-ptcF-EfsK-KB6c-NeVU-jo8v-UeqVze"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "fe591198-9082-4b15-9b62-e83518524cd2"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 17 August 2024  18:26:37 -0400 (0:00:00.342)       0:00:51.744 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002368",
    "end": "2024-08-17 18:26:37.706180",
    "rc": 0,
    "start": "2024-08-17 18:26:37.703812"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed May 29 07:43:06 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=fe591198-9082-4b15-9b62-e83518524cd2 /                       xfs     defaults        0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
/dev/mapper/foo-test1 /opt/test2 xfs defaults 0 0
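
The last line confirms the change: the role-managed entry now mounts /dev/mapper/foo-test1 at /opt/test2 and no /opt/test1 entry remains. A follow-up check could assert on the command output registered by the task above (the fstab_contents register name is illustrative):

    - name: Assert that the mount moved from /opt/test1 to /opt/test2 (sketch)
      assert:
        that:
          - "'/dev/mapper/foo-test1 /opt/test2 xfs defaults 0 0' in fstab_contents.stdout"
          - "'/opt/test1' not in fstab_contents.stdout"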

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 17 August 2024  18:26:37 -0400 (0:00:00.350)       0:00:52.094 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002358",
    "end": "2024-08-17 18:26:38.055661",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-08-17 18:26:38.053303"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 17 August 2024  18:26:38 -0400 (0:00:00.339)       0:00:52.434 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed_node3

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 17 August 2024  18:26:38 -0400 (0:00:00.039)       0:00:52.473 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 17 August 2024  18:26:38 -0400 (0:00:00.041)       0:00:52.515 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.024112",
    "end": "2024-08-17 18:26:38.496284",
    "rc": 0,
    "start": "2024-08-17 18:26:38.472172"
}

STDOUT:

        0
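
With --binary, vgs prints 1 for a shared volume group and 0 otherwise, so the bare 0 here means foo is not shared, matching the pool's shared: false setting. The next assertion verifies exactly that; a sketch of the check (the vgs_shared register name is illustrative):

    - name: Verify that VG shared value checks out (sketch)
      assert:
        that:
          # the pool in this test requests shared: false, so expect "0"
          - vgs_shared.stdout | trim == '0'
        msg: "VG shared flag does not match the requested pool setting"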

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 17 August 2024  18:26:38 -0400 (0:00:00.369)       0:00:52.884 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 17 August 2024  18:26:38 -0400 (0:00:00.035)       0:00:52.920 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed_node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 17 August 2024  18:26:38 -0400 (0:00:00.051)       0:00:52.972 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 17 August 2024  18:26:38 -0400 (0:00:00.036)       0:00:53.008 ******* 
ok: [managed_node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.351)       0:00:53.359 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.029)       0:00:53.389 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.032)       0:00:53.421 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.031)       0:00:53.453 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.025)       0:00:53.478 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.025)       0:00:53.503 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.023)       0:00:53.526 ******* 
ok: [managed_node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.031)       0:00:53.558 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0
}

STDOUT:

False



STDERR:

Shared connection to 10.31.45.60 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.396)       0:00:53.954 ******* 
skipping: [managed_node3] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.050)       0:00:54.005 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed_node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.073)       0:00:54.079 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.083)       0:00:54.162 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.059)       0:00:54.222 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.035)       0:00:54.258 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 17 August 2024  18:26:39 -0400 (0:00:00.041)       0:00:54.300 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.028)       0:00:54.328 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.035)       0:00:54.364 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.031)       0:00:54.395 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.028)       0:00:54.424 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.022)       0:00:54.446 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.025)       0:00:54.472 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.019)       0:00:54.491 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed_node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.042)       0:00:54.534 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed_node3

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.040)       0:00:54.574 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.021)       0:00:54.596 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.022)       0:00:54.618 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.021)       0:00:54.639 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.026)       0:00:54.666 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.025)       0:00:54.692 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.033)       0:00:54.726 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.033)       0:00:54.759 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed_node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.071)       0:00:54.831 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed_node3

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.066)       0:00:54.897 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.024)       0:00:54.921 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.055)       0:00:54.976 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.024)       0:00:55.001 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.027)       0:00:55.029 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed_node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.049)       0:00:55.078 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.026)       0:00:55.105 ******* 
skipping: [managed_node3] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.025)       0:00:55.130 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed_node3

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.041)       0:00:55.171 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.025)       0:00:55.196 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed
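
Encryption is disabled for this pool, so the expected crypttab entry count is "0" and the computed entry list is empty. A minimal sketch of the count assertion, using only variable names that appear in this log (hypothetical, not quoted from verify-pool-member-crypttab.yml):

    # Hypothetical sketch: no /etc/crypttab line should reference this pool member.
    - name: Check for /etc/crypttab entry
      assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int
        msg: "Unexpected crypttab entries for {{ _storage_test_pool_member_path }}"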

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.032)       0:00:55.229 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.021)       0:00:55.251 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.031)       0:00:55.282 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Saturday 17 August 2024  18:26:40 -0400 (0:00:00.032)       0:00:55.315 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.031)       0:00:55.347 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.032)       0:00:55.379 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed_node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.086)       0:00:55.466 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed_node3

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.069)       0:00:55.536 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.039)       0:00:55.575 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.034)       0:00:55.610 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.023)       0:00:55.634 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.023)       0:00:55.657 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.028)       0:00:55.685 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.026)       0:00:55.712 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.033)       0:00:55.745 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed_node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.078)       0:00:55.823 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.031)       0:00:55.855 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.034)       0:00:55.889 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.040)       0:00:55.930 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.029)       0:00:55.960 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.030)       0:00:55.990 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.023)       0:00:56.013 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.038)       0:00:56.051 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed_node3

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.064)       0:00:56.116 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}
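
The eight subsets listed above drive the per-volume checks that follow; one task file per subset is included next. A minimal sketch of how such a fan-out can be expressed, assuming a plain include_tasks loop over the list (hypothetical, not quoted from test-verify-volume.yml):

    # Hypothetical sketch: include one verification file per test subset.
    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset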

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 17 August 2024  18:26:41 -0400 (0:00:00.051)       0:00:56.168 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed_node3

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.164)       0:00:56.332 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.038)       0:00:56.371 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test2",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.054)       0:00:56.425 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.039)       0:00:56.465 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed
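
This assertion confirms that /dev/mapper/foo-test1 is mounted at the expected mount point /opt/test2 set a few tasks earlier. A minimal sketch of such a check against the gathered mount facts, reusing the variable names shown above (hypothetical, not the actual task from test-verify-volume-mount.yml):

    # Hypothetical sketch: exactly one mount entry should pair the device with the
    # expected mount point.
    - name: Verify the current mount state by device
      assert:
        that:
          - >-
            ansible_facts.mounts | selectattr('device', 'equalto', storage_test_device_path)
            | selectattr('mount', 'equalto', storage_test_mount_expected_mount_point)
            | list | length == 1
        msg: "{{ storage_test_device_path }} is not mounted on {{ storage_test_mount_expected_mount_point }}"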

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.057)       0:00:56.522 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.034)       0:00:56.556 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.040)       0:00:56.597 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.058)       0:00:56.656 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.046)       0:00:56.703 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.036)       0:00:56.740 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.037)       0:00:56.777 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.031)       0:00:56.808 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "/dev/mapper/foo-test1 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test2 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test2 "
        ]
    },
    "changed": false
}
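
The three match lists above count occurrences in /etc/fstab: one line whose device field is /dev/mapper/foo-test1, one carrying the mount point /opt/test2, and one with the "defaults" mount options. A hedged sketch of how such lists can be built, assuming storage_test_fstab holds the file contents in .stdout (the register name appears in the cleanup task later; the .stdout attribute is an assumption):

    # Hypothetical sketch: derive the fstab match lists with regex_findall.
    - name: Set some variables for fstab checking
      set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall('^' + storage_test_device_path + ' ', multiline=True) }}"
        storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout | regex_findall(' /opt/test2 ', multiline=True) }}"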

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.079)       0:00:56.888 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.049)       0:00:56.938 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.045)       0:00:56.983 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.078)       0:00:57.062 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.037)       0:00:57.099 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.030)       0:00:57.130 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.054)       0:00:57.185 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 17 August 2024  18:26:42 -0400 (0:00:00.053)       0:00:57.238 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933567.926009,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1723933567.926009,
        "dev": 6,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 124319,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1723933567.926009,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
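
The stat result above feeds the presence check that follows: the node /dev/mapper/foo-test1 must exist because the volume is expected to be present. A minimal hypothetical sketch (storage_test_dev is an assumed register name for the stat result):

    # Hypothetical sketch: device node existence must match the expected volume state.
    - name: Verify the presence/absence of the device node
      assert:
        that:
          - storage_test_dev.stat.exists == _storage_test_volume_present
        msg: "Unexpected device node state for {{ storage_test_device_path }}"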

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 17 August 2024  18:26:43 -0400 (0:00:00.532)       0:00:57.771 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 17 August 2024  18:26:43 -0400 (0:00:00.045)       0:00:57.816 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 17 August 2024  18:26:43 -0400 (0:00:00.039)       0:00:57.856 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 17 August 2024  18:26:43 -0400 (0:00:00.039)       0:00:57.896 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 17 August 2024  18:26:43 -0400 (0:00:00.045)       0:00:57.941 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 17 August 2024  18:26:43 -0400 (0:00:00.034)       0:00:57.976 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 17 August 2024  18:26:43 -0400 (0:00:00.039)       0:00:58.015 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 17 August 2024  18:26:43 -0400 (0:00:00.031)       0:00:58.046 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 17 August 2024  18:26:46 -0400 (0:00:03.001)       0:01:01.048 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.021)       0:01:01.070 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.022)       0:01:01.093 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.033)       0:01:01.126 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.022)       0:01:01.149 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.022)       0:01:01.171 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.021)       0:01:01.193 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.022)       0:01:01.215 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.021)       0:01:01.237 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.030)       0:01:01.268 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.026)       0:01:01.295 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 17 August 2024  18:26:46 -0400 (0:00:00.021)       0:01:01.317 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.023)       0:01:01.340 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.022)       0:01:01.363 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.020)       0:01:01.384 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.024)       0:01:01.408 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.022)       0:01:01.430 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.021)       0:01:01.452 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.022)       0:01:01.474 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.022)       0:01:01.496 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.022)       0:01:01.519 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.023)       0:01:01.542 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.022)       0:01:01.565 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.022)       0:01:01.587 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.022)       0:01:01.609 ******* 
ok: [managed_node3] => {
    "bytes": 3221225472,
    "changed": false,
    "lvm": "3g",
    "parted": "3GiB",
    "size": "3 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 17 August 2024  18:26:47 -0400 (0:00:00.352)       0:01:01.962 ******* 
ok: [managed_node3] => {
    "bytes": 3221225472,
    "changed": false,
    "lvm": "3g",
    "parted": "3GiB",
    "size": "3 GiB"
}
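
Both conversions land on the same figure because 3 GiB is 3 * 1024^3 = 3221225472 bytes; the later "Assert expected size is actual size" task therefore compares two identical values.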

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.366)       0:01:02.328 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_expected_size": "3221225472"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.028)       0:01:02.357 ******* 
ok: [managed_node3] => {
    "storage_test_expected_size": "3221225472"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.022)       0:01:02.379 ******* 
ok: [managed_node3] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}
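
Note on units: 10726680821 bytes is just under 10 GiB (about 9.99 GiB), which the human-readable lvm/parted/size fields here show as 9 GiB, i.e. truncated to whole gibibytes.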

TASK [Show test pool] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.371)       0:01:02.750 ******* 
skipping: [managed_node3] => {}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.035)       0:01:02.786 ******* 
skipping: [managed_node3] => {}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.033)       0:01:02.820 ******* 
skipping: [managed_node3] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.039)       0:01:02.859 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.036)       0:01:02.896 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.032)       0:01:02.929 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.030)       0:01:02.960 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.031)       0:01:02.992 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.031)       0:01:03.023 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.037)       0:01:03.061 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.030)       0:01:03.091 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.032)       0:01:03.124 ******* 
skipping: [managed_node3] => {}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.032)       0:01:03.157 ******* 
skipping: [managed_node3] => {}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.033)       0:01:03.190 ******* 
skipping: [managed_node3] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.032)       0:01:03.223 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.032)       0:01:03.255 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.039)       0:01:03.294 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 17 August 2024  18:26:48 -0400 (0:00:00.031)       0:01:03.325 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.028)       0:01:03.354 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.032)       0:01:03.386 ******* 
ok: [managed_node3] => {
    "storage_test_actual_size": {
        "bytes": 3221225472,
        "changed": false,
        "failed": false,
        "lvm": "3g",
        "parted": "3GiB",
        "size": "3 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.034)       0:01:03.421 ******* 
ok: [managed_node3] => {
    "storage_test_expected_size": "3221225472"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.040)       0:01:03.462 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.059)       0:01:03.521 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.026008",
    "end": "2024-08-17 18:26:49.609366",
    "rc": 0,
    "start": "2024-08-17 18:26:49.583358"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
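
The unquoted lvs output above is a single line of LVM2_KEY=value pairs; the next task extracts the segment type from it. A hedged sketch of that parsing, assuming the command result was registered (the register name lvs_output is hypothetical):

    # Hypothetical sketch: collect every LVM2_SEGTYPE value from the lvs output.
    - name: Set LV segment type
      set_fact:
        storage_test_lv_segtype: "{{ lvs_output.stdout | regex_findall('LVM2_SEGTYPE=(\\S+)') }}"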

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.504)       0:01:04.026 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.076)       0:01:04.103 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.052)       0:01:04.155 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.047)       0:01:04.203 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.039)       0:01:04.242 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.038)       0:01:04.281 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 17 August 2024  18:26:49 -0400 (0:00:00.035)       0:01:04.317 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.035)       0:01:04.353 ******* 

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.035)       0:01:04.388 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Repeat the previous invocation to verify idempotence] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:60
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.029)       0:01:04.418 ******* 

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.061)       0:01:04.479 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.032)       0:01:04.512 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.039)       0:01:04.551 ******* 
skipping: [managed_node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed_node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed_node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed_node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
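
Note: the same CentOS_8.yml item is applied twice above because two of the generated file-name patterns resolve to the same vars file on this host; that is expected, not an error. A minimal sketch of the kind of include_vars loop that produces this pattern (the role's real task may differ in detail; the pattern comments are assumptions based on the items shown):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"        # RedHat.yml  -> no vars file, skipped
        - "{{ ansible_facts['distribution'] }}.yml"     # CentOS.yml  -> no vars file, skipped
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"  # CentOS_8.yml
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"        # also CentOS_8.yml here
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file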

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.090)       0:01:04.642 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.044)       0:01:04.686 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.036)       0:01:04.723 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.020)       0:01:04.743 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.032)       0:01:04.776 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.067)       0:01:04.844 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.023)       0:01:04.868 ******* 
ok: [managed_node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "volumes": [
                {
                    "mount_point": "/opt/test2",
                    "name": "test1",
                    "size": "3g"
                }
            ]
        }
    ]
}
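
Note: the storage_pools value printed above is the role input for this idempotence re-run (same pool "foo" on sda, volume test1 still mounted on /opt/test2). A minimal sketch of passing that structure to the role; the exact wording in tests_change_mount.yml is not shown in this excerpt:

    - name: Repeat the previous invocation to verify idempotence
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks:
              - sda
            volumes:
              - name: test1
                size: 3g
                mount_point: /opt/test2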

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.033)       0:01:04.901 ******* 
ok: [managed_node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.023)       0:01:04.924 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.023)       0:01:04.947 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.022)       0:01:04.969 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.022)       0:01:04.992 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.021)       0:01:05.013 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.035)       0:01:05.049 ******* 

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 17 August 2024  18:26:50 -0400 (0:00:00.021)       0:01:05.070 ******* 
ok: [managed_node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/mapper/foo-test1",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test2",
            "src": "/dev/mapper/foo-test1",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "lvm2"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "3g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
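
Note: "actions": [] and "changed": false above mean the repeated run performed no blivet operations, which is exactly what the idempotence check is after. The test's actual assertion is not shown in this excerpt; a sketch of the kind of check it implies, using the blivet_output result echoed further down:

    - name: Assert that the repeated run made no changes
      ansible.builtin.assert:
        that:
          - not blivet_output.changed
          - blivet_output.actions | length == 0
        fail_msg: Role run was not idempotent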

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 17 August 2024  18:26:54 -0400 (0:00:04.077)       0:01:09.148 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 17 August 2024  18:26:54 -0400 (0:00:00.021)       0:01:09.170 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933595.0612195,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "7cabf19be818cfae5507ca95214082d911711f1e",
        "ctime": 1723933595.0582194,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 316670089,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1723933595.0582194,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1393,
        "uid": 0,
        "version": "1783227297",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 17 August 2024  18:26:55 -0400 (0:00:00.356)       0:01:09.526 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 17 August 2024  18:26:55 -0400 (0:00:00.027)       0:01:09.554 ******* 

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 17 August 2024  18:26:55 -0400 (0:00:00.023)       0:01:09.577 ******* 
ok: [managed_node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/mapper/foo-test1",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test2",
                "src": "/dev/mapper/foo-test1",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "lvm2"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "3g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 17 August 2024  18:26:55 -0400 (0:00:00.027)       0:01:09.605 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "3g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 17 August 2024  18:26:55 -0400 (0:00:00.031)       0:01:09.636 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 17 August 2024  18:26:55 -0400 (0:00:00.024)       0:01:09.661 ******* 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 17 August 2024  18:26:55 -0400 (0:00:00.020)       0:01:09.682 ******* 
ok: [managed_node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
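
Note: the empty "name"/"status" result above is consistent with a plain systemd daemon reload so that systemd re-reads the fstab-generated units. A sketch under that assumption; the role's exact task is not shown here:

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd:
        daemon_reload: true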

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 17 August 2024  18:26:55 -0400 (0:00:00.622)       0:01:10.305 ******* 
ok: [managed_node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
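
Note: each entry in blivet_output.mounts is applied by looping with the mount_info loop variable visible above. A minimal sketch, assuming ansible.posix.mount; the role's real task adds further conditionals around ownership and state handling:

    - name: Set up new/current mounts
      ansible.posix.mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        opts: "{{ mount_info['opts'] }}"
        state: "{{ mount_info['state'] }}"
      loop: "{{ blivet_output.mounts }}"
      loop_control:
        loop_var: mount_info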

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 17 August 2024  18:26:56 -0400 (0:00:00.365)       0:01:10.670 ******* 
skipping: [managed_node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 17 August 2024  18:26:56 -0400 (0:00:00.062)       0:01:10.733 ******* 
ok: [managed_node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 17 August 2024  18:26:57 -0400 (0:00:00.665)       0:01:11.398 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933211.7108438,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1716968941.893,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 135,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1716968586.525,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1157759751",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 17 August 2024  18:26:57 -0400 (0:00:00.371)       0:01:11.770 ******* 

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 17 August 2024  18:26:57 -0400 (0:00:00.027)       0:01:11.797 ******* 
ok: [managed_node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:72
Saturday 17 August 2024  18:26:58 -0400 (0:00:00.733)       0:01:12.531 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed_node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 17 August 2024  18:26:58 -0400 (0:00:00.045)       0:01:12.576 ******* 
ok: [managed_node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "3g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 17 August 2024  18:26:58 -0400 (0:00:00.027)       0:01:12.604 ******* 
skipping: [managed_node3] => {}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 17 August 2024  18:26:58 -0400 (0:00:00.021)       0:01:12.625 ******* 
ok: [managed_node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test2",
            "name": "/dev/mapper/foo-test1",
            "size": "3G",
            "type": "lvm",
            "uuid": "fae1f88e-f90d-4aeb-9c44-8a9fc134e1da"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "tmL8Zr-ptcF-EfsK-KB6c-NeVU-jo8v-UeqVze"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "fe591198-9082-4b15-9b62-e83518524cd2"
        }
    }
}
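
Note: the per-device info above (name, fstype, label, mountpoint, size, type, uuid) confirms foo-test1 is now mounted on /opt/test2. The test gathers this with its own helper; an lsblk call is only an approximate equivalent, sketched here for orientation:

    - name: Collect info about the volumes (illustrative lsblk equivalent)
      ansible.builtin.command: lsblk --json -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: lsblk_info
      changed_when: false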

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 17 August 2024  18:26:58 -0400 (0:00:00.360)       0:01:12.985 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002423",
    "end": "2024-08-17 18:26:58.938668",
    "rc": 0,
    "start": "2024-08-17 18:26:58.936245"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed May 29 07:43:06 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=fe591198-9082-4b15-9b62-e83518524cd2 /                       xfs     defaults        0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
/dev/mapper/foo-test1 /opt/test2 xfs defaults 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 17 August 2024  18:26:59 -0400 (0:00:00.355)       0:01:13.341 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002392",
    "end": "2024-08-17 18:26:59.359360",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-08-17 18:26:59.356968"
}
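
Note: the fstab and crypttab reads above feed later per-volume assertions (the registered names storage_test_fstab / storage_test_crypttab are suggested by the "Clean up variable namespace" task earlier). The fstab dump shows the role-managed line "/dev/mapper/foo-test1 /opt/test2 xfs defaults 0 0" and no leftover entry for the old mount point. A sketch of the kind of check built on that result; the actual assertions live in test files not included in this excerpt:

    - name: Check that /opt/test2 has exactly one fstab entry for the volume
      ansible.builtin.assert:
        that:
          - >-
            storage_test_fstab.stdout_lines
            | select('search', '^/dev/mapper/foo-test1 /opt/test2 ')
            | list | length == 1
        fail_msg: Expected exactly one /etc/fstab entry mounting foo-test1 on /opt/test2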

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 17 August 2024  18:26:59 -0400 (0:00:00.398)       0:01:13.740 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed_node3

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 17 August 2024  18:26:59 -0400 (0:00:00.062)       0:01:13.802 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 17 August 2024  18:26:59 -0400 (0:00:00.035)       0:01:13.838 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.024227",
    "end": "2024-08-17 18:26:59.902467",
    "rc": 0,
    "start": "2024-08-17 18:26:59.878240"
}

STDOUT:

        0
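
Note: the "0" from vgs means VG foo is not marked shared, matching the pool's requested "shared": false. A sketch of the comparison the next task makes; vgs_shared and storage_test_pool are placeholder names here, not the test's actual variables:

    - name: Verify that VG shared value checks out
      ansible.builtin.assert:
        that:
          # vgs_shared = registered vgs output, storage_test_pool = pool under test (placeholders)
          - (vgs_shared.stdout | trim | bool) == storage_test_pool.shared
        fail_msg: VG shared flag does not match the requested pool setting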

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 17 August 2024  18:26:59 -0400 (0:00:00.454)       0:01:14.293 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 17 August 2024  18:26:59 -0400 (0:00:00.032)       0:01:14.325 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed_node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.053)       0:01:14.379 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.033)       0:01:14.412 ******* 
ok: [managed_node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.398)       0:01:14.811 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.037)       0:01:14.849 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.038)       0:01:14.888 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.038)       0:01:14.926 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.030)       0:01:14.956 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.035)       0:01:14.991 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.029)       0:01:15.021 ******* 
ok: [managed_node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 17 August 2024  18:27:00 -0400 (0:00:00.052)       0:01:15.073 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0
}

STDOUT:

False



STDERR:

Shared connection to 10.31.45.60 closed.
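
Note: the "False" on stdout indicates this blivet build does not support PV grow-to-fill, so the following "Verify that PVs fill the whole devices" task is skipped (the pool also requested grow_to_fill: false). A sketch of how such a probe can gate the later check; grow_supported and storage_test_pool are placeholder names, and the real task also compares PV size to disk size:

    - name: Verify that PVs fill the whole devices when they should
      ansible.builtin.command: pvs --noheadings --units b -o pv_size "{{ st_pool_pv }}"
      loop: "{{ _storage_test_pool_pvs }}"
      loop_control:
        loop_var: st_pool_pv
      when:
        - grow_supported.stdout | trim | bool      # "False" above, so the task is skipped
        - storage_test_pool.grow_to_fill | bool    # the pool requested grow_to_fill: false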


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.395)       0:01:15.468 ******* 
skipping: [managed_node3] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.034)       0:01:15.503 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed_node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.116)       0:01:15.620 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.044)       0:01:15.665 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.034)       0:01:15.699 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.024)       0:01:15.723 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.043)       0:01:15.767 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.023)       0:01:15.791 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.033)       0:01:15.824 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.028)       0:01:15.852 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.021)       0:01:15.874 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.025)       0:01:15.900 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.023)       0:01:15.923 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.024)       0:01:15.948 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed_node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.048)       0:01:15.996 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed_node3

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.057)       0:01:16.054 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.033)       0:01:16.087 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.073)       0:01:16.161 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.048)       0:01:16.209 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.046)       0:01:16.256 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 17 August 2024  18:27:01 -0400 (0:00:00.062)       0:01:16.318 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.065)       0:01:16.384 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.042)       0:01:16.426 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed_node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.077)       0:01:16.504 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed_node3

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.056)       0:01:16.560 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.024)       0:01:16.585 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.028)       0:01:16.614 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.023)       0:01:16.638 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.025)       0:01:16.663 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed_node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.046)       0:01:16.709 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.025)       0:01:16.735 ******* 
skipping: [managed_node3] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.027)       0:01:16.762 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed_node3

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.045)       0:01:16.807 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.029)       0:01:16.837 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.043)       0:01:16.881 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.040)       0:01:16.922 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.035)       0:01:16.958 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.051)       0:01:17.009 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.074)       0:01:17.084 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.035)       0:01:17.119 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed_node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.084)       0:01:17.203 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed_node3

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 17 August 2024  18:27:02 -0400 (0:00:00.084)       0:01:17.288 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.042)       0:01:17.331 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.033)       0:01:17.365 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.033)       0:01:17.398 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.033)       0:01:17.432 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.036)       0:01:17.469 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.049)       0:01:17.518 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.063)       0:01:17.582 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed_node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.130)       0:01:17.712 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.058)       0:01:17.771 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pools were created] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.078)       0:01:17.849 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.049)       0:01:17.899 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.048)       0:01:17.947 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.033)       0:01:17.980 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.062)       0:01:18.043 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.034)       0:01:18.078 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed_node3

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.072)       0:01:18.151 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 17 August 2024  18:27:03 -0400 (0:00:00.041)       0:01:18.192 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed_node3
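
For reference, the per-subset includes above are driven by the _storage_volume_tests list set in the previous task; a minimal sketch of that loop (task and variable names inferred from the task title and the included file names, not copied from the test source) would be:

    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset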

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.201)       0:01:18.393 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.043)       0:01:18.436 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test2",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.040)       0:01:18.476 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.032)       0:01:18.508 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.037)       0:01:18.546 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.031)       0:01:18.578 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.029)       0:01:18.607 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.042)       0:01:18.650 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.033)       0:01:18.684 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.057)       0:01:18.741 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.061)       0:01:18.803 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.032)       0:01:18.835 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "/dev/mapper/foo-test1 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test2 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test2 "
        ]
    },
    "changed": false
}
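
Taken together, these match lists correspond to an /etc/fstab entry along the lines of "/dev/mapper/foo-test1 /opt/test2 xfs defaults 0 0" (the trailing dump/pass fields are assumed; only the device, mount point, fs type, and mount options are asserted by the regex matches above).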

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.080)       0:01:18.916 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.066)       0:01:18.982 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.037)       0:01:19.020 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.050)       0:01:19.070 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.047)       0:01:19.118 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.054)       0:01:19.172 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.048)       0:01:19.221 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 17 August 2024  18:27:04 -0400 (0:00:00.086)       0:01:19.308 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933567.926009,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1723933567.926009,
        "dev": 6,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 124319,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1723933567.926009,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 17 August 2024  18:27:05 -0400 (0:00:00.643)       0:01:19.951 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 17 August 2024  18:27:05 -0400 (0:00:00.039)       0:01:19.991 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 17 August 2024  18:27:05 -0400 (0:00:00.032)       0:01:20.023 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 17 August 2024  18:27:05 -0400 (0:00:00.036)       0:01:20.059 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 17 August 2024  18:27:05 -0400 (0:00:00.051)       0:01:20.111 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 17 August 2024  18:27:05 -0400 (0:00:00.075)       0:01:20.186 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 17 August 2024  18:27:05 -0400 (0:00:00.110)       0:01:20.297 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 17 August 2024  18:27:06 -0400 (0:00:00.040)       0:01:20.337 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 17 August 2024  18:27:09 -0400 (0:00:03.100)       0:01:23.438 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.021)       0:01:23.460 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.023)       0:01:23.484 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.030)       0:01:23.514 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.023)       0:01:23.538 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.031)       0:01:23.570 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.020)       0:01:23.591 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.020)       0:01:23.611 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.019)       0:01:23.631 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.027)       0:01:23.658 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.024)       0:01:23.682 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.020)       0:01:23.703 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.018)       0:01:23.722 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.018)       0:01:23.741 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.020)       0:01:23.761 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.019)       0:01:23.781 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.019)       0:01:23.800 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.020)       0:01:23.821 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.019)       0:01:23.840 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.019)       0:01:23.859 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.018)       0:01:23.878 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.018)       0:01:23.897 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.018)       0:01:23.916 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.021)       0:01:23.937 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.020)       0:01:23.957 ******* 
ok: [managed_node3] => {
    "bytes": 3221225472,
    "changed": false,
    "lvm": "3g",
    "parted": "3GiB",
    "size": "3 GiB"
}
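
(For the size math: 3 GiB = 3 * 1024^3 bytes = 3221225472 bytes, which is the value parsed above.)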

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 17 August 2024  18:27:09 -0400 (0:00:00.341)       0:01:24.299 ******* 
ok: [managed_node3] => {
    "bytes": 3221225472,
    "changed": false,
    "lvm": "3g",
    "parted": "3GiB",
    "size": "3 GiB"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.335)       0:01:24.635 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_expected_size": "3221225472"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.027)       0:01:24.663 ******* 
ok: [managed_node3] => {
    "storage_test_expected_size": "3221225472"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.020)       0:01:24.683 ******* 
ok: [managed_node3] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.384)       0:01:25.068 ******* 
skipping: [managed_node3] => {}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.033)       0:01:25.101 ******* 
skipping: [managed_node3] => {}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.034)       0:01:25.136 ******* 
skipping: [managed_node3] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.034)       0:01:25.170 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.036)       0:01:25.207 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.029)       0:01:25.236 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.030)       0:01:25.267 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.028)       0:01:25.295 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 17 August 2024  18:27:10 -0400 (0:00:00.031)       0:01:25.326 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.030)       0:01:25.357 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.032)       0:01:25.389 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.029)       0:01:25.419 ******* 
skipping: [managed_node3] => {}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.024)       0:01:25.443 ******* 
skipping: [managed_node3] => {}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.023)       0:01:25.467 ******* 
skipping: [managed_node3] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.020)       0:01:25.487 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.030)       0:01:25.518 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.059)       0:01:25.577 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.039)       0:01:25.616 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.021)       0:01:25.637 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.025)       0:01:25.663 ******* 
ok: [managed_node3] => {
    "storage_test_actual_size": {
        "bytes": 3221225472,
        "changed": false,
        "failed": false,
        "lvm": "3g",
        "parted": "3GiB",
        "size": "3 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.038)       0:01:25.701 ******* 
ok: [managed_node3] => {
    "storage_test_expected_size": "3221225472"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.036)       0:01:25.738 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.044)       0:01:25.783 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.024259",
    "end": "2024-08-17 18:27:11.848393",
    "rc": 0,
    "start": "2024-08-17 18:27:11.824134"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
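
The cmd list above corresponds to a plain lvs invocation; a rough sketch of an Ansible task that would run it (the register variable name here is illustrative, not taken from the test file) is:

    - name: Get information about the LV
      command: >
        lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted
        -o name,attr,cache_total_blocks,chunk_size,segtype foo/test1
      register: lvs_info  # illustrative name
      changed_when: false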

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.465)       0:01:26.248 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 17 August 2024  18:27:11 -0400 (0:00:00.045)       0:01:26.294 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.038)       0:01:26.332 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.035)       0:01:26.368 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.031)       0:01:26.400 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.046)       0:01:26.446 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.056)       0:01:26.503 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.032)       0:01:26.536 ******* 

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.028)       0:01:26.564 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:75
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.035)       0:01:26.600 ******* 

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.120)       0:01:26.720 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.039)       0:01:26.760 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.025)       0:01:26.786 ******* 
skipping: [managed_node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed_node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed_node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}
ok: [managed_node3] => (item=CentOS_8.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_8.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_8.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.054)       0:01:26.841 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.020)       0:01:26.862 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.019)       0:01:26.881 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.021)       0:01:26.902 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.020)       0:01:26.923 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed_node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.073)       0:01:26.997 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.035)       0:01:27.033 ******* 
ok: [managed_node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "state": "absent",
            "volumes": [
                {
                    "mount_point": "/opt/test2",
                    "name": "test1",
                    "size": "3g"
                }
            ]
        }
    ]
}
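
This storage_pools value matches the "Clean up" step of the test: the same pool and volume used earlier in the run, now requested with state: absent so the role removes them. A minimal sketch of such an invocation (whether the test uses include_role or a roles: entry is an assumption here):

    - name: Clean up
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: ["sda"]
            state: absent
            volumes:
              - name: test1
                size: "3g"
                mount_point: "/opt/test2"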

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.035)       0:01:27.069 ******* 
ok: [managed_node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.033)       0:01:27.102 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.037)       0:01:27.140 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.035)       0:01:27.175 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.033)       0:01:27.209 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.034)       0:01:27.243 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.050)       0:01:27.294 ******* 

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 17 August 2024  18:27:12 -0400 (0:00:00.030)       0:01:27.324 ******* 
changed: [managed_node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test2",
            "src": "/dev/mapper/foo-test1",
            "state": "absent"
        }
    ],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "3g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
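
The four actions above are blivet's teardown of the LVM stack on sda: the xfs filesystem, the test1 logical volume, the foo volume group, and finally the PV signature on the disk. Purely for illustration, the same teardown done by hand would look roughly like the command tasks below; the role performs it through the blivet module, not through these commands:

    - name: Remove the logical volume (illustrative sketch)
      command: lvremove -y foo/test1
    - name: Remove the volume group (illustrative sketch)
      command: vgremove -y foo
    - name: Wipe the LVM PV signature from the disk (illustrative sketch)
      command: wipefs -a /dev/sda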

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 17 August 2024  18:27:17 -0400 (0:00:04.689)       0:01:32.013 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 17 August 2024  18:27:17 -0400 (0:00:00.022)       0:01:32.035 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933595.0612195,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "7cabf19be818cfae5507ca95214082d911711f1e",
        "ctime": 1723933595.0582194,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 316670089,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1723933595.0582194,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1393,
        "uid": 0,
        "version": "1783227297",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 17 August 2024  18:27:18 -0400 (0:00:00.351)       0:01:32.386 ******* 
ok: [managed_node3] => {
    "backup": "",
    "changed": false
}
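
The fingerprint task reports backup: "" and changed: false, which is consistent with a lineinfile-style edit that found the marker already present; the marker itself ("# system_role:storage") appears in the /etc/fstab contents read later in this run. A minimal sketch under that assumption (the role's actual task is not shown here):

    - name: Add fingerprint to /etc/fstab (sketch, assuming lineinfile)
      lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"
        insertbefore: BOF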

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 17 August 2024  18:27:18 -0400 (0:00:00.349)       0:01:32.736 ******* 

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 17 August 2024  18:27:18 -0400 (0:00:00.018)       0:01:32.755 ******* 
ok: [managed_node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test2",
                "src": "/dev/mapper/foo-test1",
                "state": "absent"
            }
        ],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "3g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 17 August 2024  18:27:18 -0400 (0:00:00.026)       0:01:32.781 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "3g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 17 August 2024  18:27:18 -0400 (0:00:00.024)       0:01:32.806 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 17 August 2024  18:27:18 -0400 (0:00:00.025)       0:01:32.831 ******* 
changed: [managed_node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test2', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test2",
        "src": "/dev/mapper/foo-test1",
        "state": "absent"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
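
The removal loops over the mounts list from blivet_output, and the per-item fields in the result (fstab, opts, dump, passno) match the mount module. A minimal sketch under that assumption, with the loop variable name taken from ansible_loop_var in the output above:

    - name: Remove obsolete mounts (sketch)
      mount:
        path: "{{ mount_info['path'] }}"
        src: "{{ mount_info['src'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        state: absent
      loop: "{{ blivet_output.mounts }}"
      loop_control:
        loop_var: mount_info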

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 17 August 2024  18:27:18 -0400 (0:00:00.359)       0:01:33.190 ******* 
ok: [managed_node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
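
The result shape here (name: null, empty status) is what the systemd module returns for a bare daemon reload. A minimal sketch under that assumption:

    - name: Tell systemd to refresh its view of /etc/fstab (sketch)
      systemd:
        daemon_reload: true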

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 17 August 2024  18:27:19 -0400 (0:00:00.601)       0:01:33.792 ******* 

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 17 August 2024  18:27:19 -0400 (0:00:00.029)       0:01:33.821 ******* 

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 17 August 2024  18:27:19 -0400 (0:00:00.028)       0:01:33.849 ******* 
ok: [managed_node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 17 August 2024  18:27:20 -0400 (0:00:00.630)       0:01:34.479 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "atime": 1723933211.7108438,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1716968941.893,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 135,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1716968586.525,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1157759751",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 17 August 2024  18:27:20 -0400 (0:00:00.363)       0:01:34.843 ******* 

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 17 August 2024  18:27:20 -0400 (0:00:00.018)       0:01:34.862 ******* 
ok: [managed_node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:88
Saturday 17 August 2024  18:27:21 -0400 (0:00:00.829)       0:01:35.692 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed_node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 17 August 2024  18:27:21 -0400 (0:00:00.046)       0:01:35.738 ******* 
ok: [managed_node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "3g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 17 August 2024  18:27:21 -0400 (0:00:00.031)       0:01:35.770 ******* 
skipping: [managed_node3] => {}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 17 August 2024  18:27:21 -0400 (0:00:00.023)       0:01:35.794 ******* 
ok: [managed_node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "fe591198-9082-4b15-9b62-e83518524cd2"
        }
    }
}
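
The info dictionary above lists lsblk-style fields (name, fstype, label, mountpoint, size, type, uuid) for every block device, confirming that only the raw disks and the root partition remain after the pool was removed. The test gathers this with its own helper; a hedged, roughly equivalent collection step would be:

    - name: Collect info about the volumes (illustrative sketch)
      command: lsblk --pairs -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: storage_test_lsblk   # hypothetical register name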

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 17 August 2024  18:27:21 -0400 (0:00:00.355)       0:01:36.150 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002574",
    "end": "2024-08-17 18:27:22.113157",
    "rc": 0,
    "start": "2024-08-17 18:27:22.110583"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Wed May 29 07:43:06 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=fe591198-9082-4b15-9b62-e83518524cd2 /                       xfs     defaults        0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
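
With the pool removed, the role-managed entry for /opt/test2 is gone from /etc/fstab; only the fingerprint comment and the pre-existing entries remain. A minimal verification sketch (hypothetical task and register name, not taken from the test file):

    - name: Assert the old mount point is gone from fstab (sketch)
      assert:
        that:
          - "'/opt/test2' not in storage_test_fstab.stdout"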

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.358)       0:01:36.508 ******* 
ok: [managed_node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002598",
    "end": "2024-08-17 18:27:22.479664",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-08-17 18:27:22.477066"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.356)       0:01:36.865 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed_node3

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.049)       0:01:36.915 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.033)       0:01:36.949 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.023)       0:01:36.972 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.032)       0:01:37.005 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed_node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.045)       0:01:37.050 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "0",
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.028)       0:01:37.079 ******* 

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.020)       0:01:37.100 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "0"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.024)       0:01:37.124 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": []
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.032)       0:01:37.157 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed
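
With the pool absent, the expected PV count is 0 and the discovered PV list is empty, so the assertion passes. A minimal sketch of the kind of check that produces this result, using the variable names set a few tasks earlier:

    - name: Verify PV count (sketch)
      assert:
        that:
          - __pvs_lvm_len | int == _storage_test_expected_pv_count | int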

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.030)       0:01:37.188 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.037)       0:01:37.226 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.037)       0:01:37.263 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 17 August 2024  18:27:22 -0400 (0:00:00.031)       0:01:37.295 ******* 

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.046)       0:01:37.341 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0
}

STDOUT:

False



STDERR:

Shared connection to 10.31.45.60 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.429)       0:01:37.770 ******* 

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.027)       0:01:37.797 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed_node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.058)       0:01:37.856 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.028)       0:01:37.885 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.029)       0:01:37.914 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.024)       0:01:37.939 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.027)       0:01:37.966 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.025)       0:01:37.991 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.029)       0:01:38.021 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.059)       0:01:38.080 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.021)       0:01:38.101 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.019)       0:01:38.121 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.020)       0:01:38.142 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.019)       0:01:38.161 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed_node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.040)       0:01:38.202 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed_node3

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.040)       0:01:38.243 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.020)       0:01:38.263 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.019)       0:01:38.282 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.020)       0:01:38.303 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Saturday 17 August 2024  18:27:23 -0400 (0:00:00.019)       0:01:38.322 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.019)       0:01:38.341 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.019)       0:01:38.361 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.019)       0:01:38.380 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed_node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.040)       0:01:38.421 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed_node3

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.040)       0:01:38.462 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.019)       0:01:38.481 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.019)       0:01:38.501 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.020)       0:01:38.521 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.017)       0:01:38.539 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed_node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.047)       0:01:38.587 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.025)       0:01:38.613 ******* 

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.029)       0:01:38.643 ******* 

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.028)       0:01:38.671 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.031)       0:01:38.703 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed_node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.075)       0:01:38.778 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed_node3

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.054)       0:01:38.833 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.023)       0:01:38.857 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.025)       0:01:38.883 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.023)       0:01:38.906 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.026)       0:01:38.933 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.025)       0:01:38.958 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.020)       0:01:38.979 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.020)       0:01:38.999 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed_node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.056)       0:01:39.055 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.024)       0:01:39.080 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.021)       0:01:39.102 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.022)       0:01:39.125 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.045)       0:01:39.170 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.034)       0:01:39.205 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.030)       0:01:39.236 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.024)       0:01:39.261 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed_node3

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 17 August 2024  18:27:24 -0400 (0:00:00.052)       0:01:39.314 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.038)       0:01:39.353 ******* 
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed_node3
included: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed_node3
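
NOTE (editorial): the task name above still shows the raw "{{ storage_test_volume_subset }}"
because task names are templated once, before the loop runs; the loop then includes one
verify file per entry of the _storage_volume_tests list set two tasks earlier. A minimal
sketch of such a loop, in Ansible YAML (an illustrative reconstruction, not the actual
contents of test-verify-volume.yml):

    # Sketch only: the file naming pattern is inferred from the includes above.
    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset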

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.158)       0:01:39.511 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.025)       0:01:39.536 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test2",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}
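
NOTE (editorial): with the volume expected to be absent (_storage_test_volume_present is
false), the mount-state checks that follow are skipped. For reference, a check of this
kind typically compares ansible_mounts against the device path and expected mount point
set above; a hedged sketch (illustrative only, not the test source):

    # Sketch only: variable names are taken from the facts shown in this log.
    - name: Verify the current mount state by device
      assert:
        that:
          - ansible_mounts | selectattr('device', 'equalto', storage_test_device_path) | selectattr('mount', 'equalto', storage_test_mount_expected_mount_point) | list | length == 1
      when: _storage_test_volume_present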

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.034)       0:01:39.571 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.036)       0:01:39.608 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.037)       0:01:39.645 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.035)       0:01:39.680 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.039)       0:01:39.719 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.035)       0:01:39.754 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.029)       0:01:39.784 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.031)       0:01:39.815 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.026)       0:01:39.842 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.027)       0:01:39.869 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.062)       0:01:39.932 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.025)       0:01:39.957 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed
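
NOTE (editorial): "All assertions passed" here means the fstab expectation held: with the
volume absent, the expected number of mount point matches was set to "0" above and the
match list stayed empty. A hedged sketch of an equivalent assertion (not the test source):

    # Sketch only: compares the collected matches against the expected count.
    - name: Verify the fstab mount point
      assert:
        that:
          - storage_test_fstab_mount_point_matches | length == storage_test_fstab_expected_mount_point_matches | int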

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.024)       0:01:39.982 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.019)       0:01:40.002 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.023)       0:01:40.025 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.019)       0:01:40.045 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.027)       0:01:40.072 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 17 August 2024  18:27:25 -0400 (0:00:00.033)       0:01:40.105 ******* 
ok: [managed_node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 17 August 2024  18:27:26 -0400 (0:00:00.389)       0:01:40.494 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 17 August 2024  18:27:26 -0400 (0:00:00.021)       0:01:40.516 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 17 August 2024  18:27:26 -0400 (0:00:00.026)       0:01:40.542 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 17 August 2024  18:27:26 -0400 (0:00:00.016)       0:01:40.559 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 17 August 2024  18:27:26 -0400 (0:00:00.022)       0:01:40.581 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 17 August 2024  18:27:26 -0400 (0:00:00.021)       0:01:40.602 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 17 August 2024  18:27:26 -0400 (0:00:00.018)       0:01:40.621 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 17 August 2024  18:27:26 -0400 (0:00:00.023)       0:01:40.645 ******* 
ok: [managed_node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 17 August 2024  18:27:29 -0400 (0:00:03.047)       0:01:43.692 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.031)       0:01:43.724 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.033)       0:01:43.758 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.060)       0:01:43.819 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.047)       0:01:43.866 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.032)       0:01:43.898 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.069)       0:01:43.967 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.064)       0:01:44.031 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.050)       0:01:44.082 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.073)       0:01:44.155 ******* 
ok: [managed_node3] => {
    "changed": false
}

MSG:

All assertions passed
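
NOTE (editorial): because the volume is not encrypted, the expected number of /etc/crypttab
entries was set to "0" above, and the passing assertion confirms that none were found.
A minimal sketch of that kind of check (illustrative, not the test source):

    # Sketch only: variable names are taken from the facts shown in this log.
    - name: Check for /etc/crypttab entry
      assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int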

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 17 August 2024  18:27:29 -0400 (0:00:00.091)       0:01:44.247 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.081)       0:01:44.328 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.039)       0:01:44.368 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.031)       0:01:44.400 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.030)       0:01:44.430 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.039)       0:01:44.470 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.044)       0:01:44.514 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.031)       0:01:44.546 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.030)       0:01:44.576 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.040)       0:01:44.617 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.063)       0:01:44.681 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.030)       0:01:44.712 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.049)       0:01:44.762 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.047)       0:01:44.809 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.066)       0:01:44.876 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.045)       0:01:44.922 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.040)       0:01:44.963 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.032)       0:01:44.995 ******* 
ok: [managed_node3] => {
    "storage_test_expected_size": "3221225472"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.044)       0:01:45.039 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.034)       0:01:45.073 ******* 
skipping: [managed_node3] => {}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.031)       0:01:45.104 ******* 
skipping: [managed_node3] => {}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.031)       0:01:45.135 ******* 
skipping: [managed_node3] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.043)       0:01:45.178 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.034)       0:01:45.213 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 17 August 2024  18:27:30 -0400 (0:00:00.039)       0:01:45.253 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.095)       0:01:45.348 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.033)       0:01:45.382 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.031)       0:01:45.413 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.029)       0:01:45.443 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.039)       0:01:45.482 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.033)       0:01:45.516 ******* 
skipping: [managed_node3] => {}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.038)       0:01:45.554 ******* 
skipping: [managed_node3] => {}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.032)       0:01:45.586 ******* 
skipping: [managed_node3] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.032)       0:01:45.619 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.034)       0:01:45.653 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.043)       0:01:45.697 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.032)       0:01:45.730 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.036)       0:01:45.767 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.029)       0:01:45.797 ******* 
ok: [managed_node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.046)       0:01:45.844 ******* 
ok: [managed_node3] => {
    "storage_test_expected_size": "3221225472"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.035)       0:01:45.879 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}
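
NOTE (editorial): storage_test_expected_size is 3221225472 bytes, i.e. 3 GiB. The final
size assertion is skipped because the volume is expected to be absent; when a volume is
present, the comparison would take roughly this form (a sketch assuming the parsed actual
size is available in bytes, e.g. as storage_test_actual_size.bytes; the tolerance value
is an assumption, not taken from the test):

    # Sketch only: both the .bytes attribute and the 10 MiB tolerance are assumptions.
    - name: Assert expected size is actual size
      assert:
        that:
          - (storage_test_actual_size.bytes | int - storage_test_expected_size | int) | abs <= 10485760
      when: _storage_test_volume_present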

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.035)       0:01:45.914 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.032)       0:01:45.947 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.025)       0:01:45.973 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.024)       0:01:45.997 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.024)       0:01:46.022 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.023)       0:01:46.045 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.031)       0:01:46.077 ******* 
skipping: [managed_node3] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.027)       0:01:46.105 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.036)       0:01:46.141 ******* 

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 17 August 2024  18:27:31 -0400 (0:00:00.036)       0:01:46.178 ******* 
ok: [managed_node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}
META: ran handlers
META: ran handlers

PLAY RECAP *********************************************************************
managed_node3              : ok=417  changed=6    unreachable=0    failed=0    skipped=462  rescued=0    ignored=0   

Saturday 17 August 2024  18:27:31 -0400 (0:00:00.031)       0:01:46.210 ******* 
=============================================================================== 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.69s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.52s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.49s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.08s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
Ensure cryptsetup is present -------------------------------------------- 3.10s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Ensure cryptsetup is present -------------------------------------------- 3.05s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Ensure cryptsetup is present -------------------------------------------- 3.04s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Ensure cryptsetup is present -------------------------------------------- 3.00s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
fedora.linux_system_roles.storage : Make sure blivet is available ------- 2.92s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 
Ensure test packages ---------------------------------------------------- 2.85s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2 
fedora.linux_system_roles.storage : Make sure required packages are installed --- 2.82s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 
fedora.linux_system_roles.storage : Get service facts ------------------- 1.75s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 
Gathering Facts --------------------------------------------------------- 1.11s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/tests/storage/tests_change_mount.yml:2 
fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab --- 0.93s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.83s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.74s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.73s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.73s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.72s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Get required packages --------------- 0.71s
/tmp/collections-ryv/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19