ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-0t1
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.7 (main, Oct  1 2024, 00:00:00) [GCC 14.2.1 20240912 (Red Hat 14.2.1-3)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_swap.yml *******************************************************
1 plays in /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml

PLAY [Test management of swap] *************************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:2
Friday 17 January 2025  09:58:10 -0500 (0:00:00.011)       0:00:00.011 ******** 
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html for more information.
ok: [managed-node3]
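
The interpreter-discovery warning above goes away if the interpreter is pinned in inventory. A minimal sketch using the standard ansible_python_interpreter variable (host name taken from this run; the path is assumed to stay stable):

    # inventory.yml
    all:
      hosts:
        managed-node3:
          ansible_python_interpreter: /usr/bin/python3.12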

TASK [Include role to ensure packages are installed] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:10
Friday 17 January 2025  09:58:12 -0500 (0:00:01.514)       0:00:01.526 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Friday 17 January 2025  09:58:12 -0500 (0:00:00.030)       0:00:01.557 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Friday 17 January 2025  09:58:12 -0500 (0:00:00.023)       0:00:01.581 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Friday 17 January 2025  09:58:12 -0500 (0:00:00.039)       0:00:01.620 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
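
Note on the loop above: the role probes progressively more specific vars files. RedHat.yml is skipped because no such file exists under the role's vars/ directory, Fedora.yml is loaded, and Fedora_40.yml is checked twice because on Fedora the distribution major version and the full distribution version both render to "40". The unrendered Jinja entry in blivet_package_list is evaluated later, at the point of use. The task likely follows this shape (file names and the __vars_file condition are taken from the output; the loop construction is an assumption):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"        # RedHat.yml
        - "{{ ansible_facts['distribution'] }}.yml"     # Fedora.yml
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file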

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Friday 17 January 2025  09:58:12 -0500 (0:00:00.043)       0:00:01.663 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Friday 17 January 2025  09:58:12 -0500 (0:00:00.496)       0:00:02.160 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__storage_is_ostree": false
    },
    "changed": false
}
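
The two tasks above implement the usual ostree probe: stat a marker file, then record the result as a fact. A sketch (the marker path /run/ostree-booted is the conventional one and is assumed here; the register name is illustrative):

    - name: Check if system is ostree
      ansible.builtin.stat:
        path: /run/ostree-booted    # conventional ostree marker file (assumed path)
      register: __ostree_booted

    - name: Set flag to indicate system is ostree
      ansible.builtin.set_fact:
        __storage_is_ostree: "{{ __ostree_booted.stat.exists }}"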

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Friday 17 January 2025  09:58:13 -0500 (0:00:00.039)       0:00:02.199 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Friday 17 January 2025  09:58:13 -0500 (0:00:00.026)       0:00:02.225 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Friday 17 January 2025  09:58:13 -0500 (0:00:00.019)       0:00:02.245 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Friday 17 January 2025  09:58:13 -0500 (0:00:00.058)       0:00:02.304 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
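
"Nothing to do" with rc 0 and empty results is the dnf-backed package module reporting that everything in blivet_package_list (set from Fedora.yml above) is already installed. The task is plausibly equivalent to:

    - name: Make sure blivet is available
      ansible.builtin.package:
        name: "{{ blivet_package_list }}"
        state: present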

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Friday 17 January 2025  09:58:14 -0500 (0:00:01.700)       0:00:04.004 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Friday 17 January 2025  09:58:14 -0500 (0:00:00.037)       0:00:04.041 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
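
"VARIABLE IS NOT DEFINED!" is not an error here: ansible.builtin.debug with var prints that marker instead of failing when the variable is undefined, which is why both tasks report ok. The two tasks amount to:

    - name: Show storage_pools
      ansible.builtin.debug:
        var: storage_pools

    - name: Show storage_volumes
      ansible.builtin.debug:
        var: storage_volumes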

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Friday 17 January 2025  09:58:14 -0500 (0:00:00.032)       0:00:04.074 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}
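
This is a dry-run call into the role's blivet module: with no pools or volumes requested it returns empty actions and an empty packages list. A sketch of the invocation (parameter names assumed from the role's module interface; the real task passes more options):

    - name: Get required packages
      fedora.linux_system_roles.blivet:
        pools: "{{ _storage_pools_list }}"
        volumes: "{{ _storage_volumes_list }}"
        packages_only: true    # compute required packages, change nothing
      register: package_info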

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Friday 17 January 2025  09:58:15 -0500 (0:00:00.898)       0:00:04.972 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Friday 17 January 2025  09:58:15 -0500 (0:00:00.045)       0:00:05.018 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Friday 17 January 2025  09:58:15 -0500 (0:00:00.035)       0:00:05.053 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "install_copr | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Friday 17 January 2025  09:58:15 -0500 (0:00:00.037)       0:00:05.091 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}
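
The COPR tasks skip because install_copr is unset; the false_condition above shows the exact gate. The install step is effectively the following (the package list is grounded in _storage_copr_support_packages set earlier; the task body is a sketch):

    - name: Make sure COPR support packages are present
      ansible.builtin.package:
        name: "{{ _storage_copr_support_packages }}"
        state: present
      when: install_copr | d(false) | bool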

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Friday 17 January 2025  09:58:15 -0500 (0:00:00.035)       0:00:05.126 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Friday 17 January 2025  09:58:17 -0500 (0:00:01.569)       0:00:06.695 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "arp-ethers.service": {
                "name": "arp-ethers.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "bluetooth.service": {
                "name": "bluetooth.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.bluez.service": {
                "name": "dbus-org.bluez.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.home1.service": {
                "name": "dbus-org.freedesktop.home1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.oom1.service": {
                "name": "dbus-org.freedesktop.oom1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.portable1.service": {
                "name": "dbus-org.freedesktop.portable1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.resolve1.service": {
                "name": "dbus-org.freedesktop.resolve1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fcoe.service": {
                "name": "fcoe.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fwupd-offline-update.service": {
                "name": "fwupd-offline-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "fwupd-refresh.service": {
                "name": "fwupd-refresh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "fwupd.service": {
                "name": "fwupd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "iscsi-shutdown.service": {
                "name": "iscsi-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsi.service": {
                "name": "iscsi.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsid.service": {
                "name": "iscsid.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-activation-early.service": {
                "name": "lvm2-activation-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_multipath.service": {
                "name": "modprobe@dm_multipath.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "passim.service": {
                "name": "passim.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-halt.service": {
                "name": "plymouth-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-kexec.service": {
                "name": "plymouth-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-poweroff.service": {
                "name": "plymouth-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-quit.service": {
                "name": "plymouth-quit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-read-write.service": {
                "name": "plymouth-read-write.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-reboot.service": {
                "name": "plymouth-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-switch-root-initramfs.service": {
                "name": "plymouth-switch-root-initramfs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-switch-root.service": {
                "name": "plymouth-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "quotaon.service": {
                "name": "quotaon.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "raid-check.service": {
                "name": "raid-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rbdmap.service": {
                "name": "rbdmap.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-plymouth.service": {
                "name": "systemd-ask-password-plymouth.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bsod.service": {
                "name": "systemd-bsod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-homed-activate.service": {
                "name": "systemd-homed-activate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-homed.service": {
                "name": "systemd-homed.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-networkd-wait-online@.service": {
                "name": "systemd-networkd-wait-online@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "systemd-networkd.service": {
                "name": "systemd-networkd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-portabled.service": {
                "name": "systemd-portabled.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck.service": {
                "name": "systemd-quotacheck.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-resolved.service": {
                "name": "systemd-resolved.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-storagetm.service": {
                "name": "systemd-storagetm.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-time-wait-sync.service": {
                "name": "systemd-time-wait-sync.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-userdbd.service": {
                "name": "systemd-userdbd.service",
                "source": "systemd",
                "state": "running",
                "status": "indirect"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-zram-setup@.service": {
                "name": "systemd-zram-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-zram-setup@zram0.service": {
                "name": "systemd-zram-setup@zram0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "udisks2.service": {
                "name": "udisks2.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "unbound-anchor.service": {
                "name": "unbound-anchor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            }
        }
    },
    "changed": false
}
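
The service facts above feed the next task, which derives the list of systemd cryptsetup units that must be masked while storage changes are in flight. One plausible derivation (a sketch only, not the role's verbatim code; the real logic lives at main-blivet.yml:57) is:

    - name: Set storage_cryptsetup_services (sketch; filter pattern is an assumption)
      ansible.builtin.set_fact:
        storage_cryptsetup_services: "{{ ansible_facts.services.keys() | select('search', '^systemd-cryptsetup@') | list }}"

No systemd-cryptsetup@ units exist on this host, hence the empty list in the task below.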

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Friday 17 January 2025  09:58:20 -0500 (0:00:02.947)       0:00:09.643 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Friday 17 January 2025  09:58:20 -0500 (0:00:00.091)       0:00:09.735 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Friday 17 January 2025  09:58:20 -0500 (0:00:00.031)       0:00:09.766 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Friday 17 January 2025  09:58:21 -0500 (0:00:00.653)       0:00:10.420 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Friday 17 January 2025  09:58:21 -0500 (0:00:00.046)       0:00:10.467 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125865.9724185,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "999566a2dd7d7e20c05b75c7156c857649c75266",
        "ctime": 1737125865.9714186,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 281544,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737125865.9714186,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "2570911275",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Friday 17 January 2025  09:58:21 -0500 (0:00:00.436)       0:00:10.903 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Friday 17 January 2025  09:58:21 -0500 (0:00:00.045)       0:00:10.948 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Friday 17 January 2025  09:58:21 -0500 (0:00:00.031)       0:00:10.980 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Friday 17 January 2025  09:58:21 -0500 (0:00:00.041)       0:00:11.021 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Friday 17 January 2025  09:58:21 -0500 (0:00:00.038)       0:00:11.059 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Friday 17 January 2025  09:58:21 -0500 (0:00:00.038)       0:00:11.097 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Friday 17 January 2025  09:58:21 -0500 (0:00:00.068)       0:00:11.166 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Friday 17 January 2025  09:58:22 -0500 (0:00:00.035)       0:00:11.201 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Friday 17 January 2025  09:58:22 -0500 (0:00:00.083)       0:00:11.285 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Friday 17 January 2025  09:58:22 -0500 (0:00:00.090)       0:00:11.376 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Friday 17 January 2025  09:58:22 -0500 (0:00:00.026)       0:00:11.402 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737124906.382958,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1737124902.2659435,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1737124902.2670383,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "205140862",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Friday 17 January 2025  09:58:22 -0500 (0:00:00.401)       0:00:11.804 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Friday 17 January 2025  09:58:22 -0500 (0:00:00.018)       0:00:11.823 ******** 
ok: [managed-node3]

TASK [Mark tasks to be skipped] ************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:14
Friday 17 January 2025  09:58:23 -0500 (0:00:00.967)       0:00:12.790 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "packages_installed",
            "service_facts"
        ]
    },
    "changed": false
}
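
The storage_skip_checks fact lets the test skip the role's blivet-availability, package-install, and service-facts checks on later role runs, since the first include already performed them. A minimal sketch of such a task (the actual task is at tests_swap.yml:14):

    - name: Mark tasks to be skipped
      ansible.builtin.set_fact:
        storage_skip_checks:
          - blivet_available
          - packages_installed
          - service_facts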

TASK [Get unused disks for swap] ***********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:22
Friday 17 January 2025  09:58:23 -0500 (0:00:00.058)       0:00:12.848 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node3

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
Friday 17 January 2025  09:58:23 -0500 (0:00:00.042)       0:00:12.890 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
Friday 17 January 2025  09:58:25 -0500 (0:00:01.632)       0:00:14.523 ******** 
ok: [managed-node3] => {
    "changed": false,
    "disks": [
        "sda"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"ext4\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"ext4\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/zram0\" TYPE=\"disk\" SIZE=\"3893362688\" FSTYPE=\"\" LOG-SEC=\"4096\"",
        "filename [xvda2] is a partition",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions",
        "Disk [/dev/zram0] attrs [{'type': 'disk', 'size': '3893362688', 'fstype': '', 'ssize': '4096'}] size is less than requested"
    ]
}
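
The "Line:" entries above are lsblk-style NAME/TYPE/SIZE/FSTYPE/LOG-SEC pairs; only whole disks with no filesystem and sufficient size qualify, which is why the xvda partitions, /dev/xvda itself (it has partitions), and the undersized /dev/zram0 are rejected. A roughly equivalent manual scan (a sketch; the test actually uses its own helper module to produce unused_disks_return) would be:

    - name: List block devices as key="value" pairs, sizes in bytes
      ansible.builtin.command: lsblk -b -p -P -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
      register: lsblk_out
      changed_when: false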

TASK [Debug why there are no unused disks] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20
Friday 17 January 2025  09:58:26 -0500 (0:00:00.719)       0:00:15.242 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'Unable to find unused disk' in unused_disks_return.disks",
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29
Friday 17 January 2025  09:58:26 -0500 (0:00:00.037)       0:00:15.280 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "unused_disks": [
            "sda"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there are not enough unused disks in the system] ******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34
Friday 17 January 2025  09:58:26 -0500 (0:00:00.042)       0:00:15.322 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)",
    "skip_reason": "Conditional result was False"
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39
Friday 17 January 2025  09:58:26 -0500 (0:00:00.079)       0:00:15.402 ******** 
ok: [managed-node3] => {
    "unused_disks": [
        "sda"
    ]
}

TASK [Save disk used for swap] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:30
Friday 17 January 2025  09:58:26 -0500 (0:00:00.082)       0:00:15.484 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__swap_disk": "sda"
    },
    "changed": false
}

TASK [Create a disk device with swap] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:34
Friday 17 January 2025  09:58:26 -0500 (0:00:00.093)       0:00:15.580 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Friday 17 January 2025  09:58:26 -0500 (0:00:00.096)       0:00:15.677 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Friday 17 January 2025  09:58:26 -0500 (0:00:00.058)       0:00:15.736 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Friday 17 January 2025  09:58:26 -0500 (0:00:00.076)       0:00:15.813 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
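
The loop items above (RedHat.yml, Fedora.yml, Fedora_40.yml twice) follow the common system-roles pattern of trying increasingly specific vars files; Fedora_40.yml is checked twice because the distribution version and major version are both "40" here. A sketch of that pattern (the exact task lives at set_vars.yml:7; the loop expressions shown are illustrative, not the role's verbatim code):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file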

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Friday 17 January 2025  09:58:26 -0500 (0:00:00.156)       0:00:15.969 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Friday 17 January 2025  09:58:26 -0500 (0:00:00.055)       0:00:16.025 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Friday 17 January 2025  09:58:26 -0500 (0:00:00.041)       0:00:16.067 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Friday 17 January 2025  09:58:26 -0500 (0:00:00.041)       0:00:16.108 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Friday 17 January 2025  09:58:26 -0500 (0:00:00.039)       0:00:16.148 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Friday 17 January 2025  09:58:27 -0500 (0:00:00.088)       0:00:16.236 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Friday 17 January 2025  09:58:27 -0500 (0:00:00.045)       0:00:16.282 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Friday 17 January 2025  09:58:27 -0500 (0:00:00.041)       0:00:16.323 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "fs_type": "swap",
            "name": "test1",
            "type": "disk"
        }
    ]
}
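
This is the volume specification the test passes to the role: format the whole unused disk as swap. A minimal sketch of the invocation (as at tests_swap.yml:34; wiring disks to the earlier unused_disks fact is an assumption based on the preceding tasks):

    - name: Create a disk device with swap
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_volumes:
          - name: test1
            type: disk
            disks: "{{ unused_disks }}"
            fs_type: swap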

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Friday 17 January 2025  09:58:27 -0500 (0:00:00.062)       0:00:16.385 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Friday 17 January 2025  09:58:27 -0500 (0:00:00.121)       0:00:16.506 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Friday 17 January 2025  09:58:27 -0500 (0:00:00.044)       0:00:16.551 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Friday 17 January 2025  09:58:27 -0500 (0:00:00.071)       0:00:16.622 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Friday 17 January 2025  09:58:27 -0500 (0:00:00.077)       0:00:16.700 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Friday 17 January 2025  09:58:27 -0500 (0:00:00.108)       0:00:16.808 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Friday 17 January 2025  09:58:27 -0500 (0:00:00.034)       0:00:16.843 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "swap"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "swap",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "none",
            "src": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
            "state": "present"
        }
    ],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "swap",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}
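
The single "create format" action is blivet writing a swap signature over the whole disk, and the returned mounts entry requests a matching fstab record keyed by the new UUID. Done by hand, the formatting step would be roughly (a sketch, not the role's actual code path):

    - name: Format the disk as swap
      # destructive: writes a swap signature over /dev/sda
      ansible.builtin.command: mkswap /dev/sda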

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Friday 17 January 2025  09:58:29 -0500 (0:00:01.769)       0:00:18.612 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Friday 17 January 2025  09:58:29 -0500 (0:00:00.072)       0:00:18.684 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125865.9724185,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "999566a2dd7d7e20c05b75c7156c857649c75266",
        "ctime": 1737125865.9714186,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 281544,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737125865.9714186,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "2570911275",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Friday 17 January 2025  09:58:29 -0500 (0:00:00.466)       0:00:19.151 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Friday 17 January 2025  09:58:30 -0500 (0:00:00.655)       0:00:19.806 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Friday 17 January 2025  09:58:30 -0500 (0:00:00.042)       0:00:19.849 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "swap"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "swap",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "none",
                "src": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
                "state": "present"
            }
        ],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "swap",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Friday 17 January 2025  09:58:30 -0500 (0:00:00.093)       0:00:19.942 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Friday 17 January 2025  09:58:30 -0500 (0:00:00.084)       0:00:20.027 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "swap",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Friday 17 January 2025  09:58:30 -0500 (0:00:00.064)       0:00:20.092 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Friday 17 January 2025  09:58:31 -0500 (0:00:00.110)       0:00:20.202 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
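
A null name with an empty status indicates a daemon-reload-only call: systemd re-reads /etc/fstab and regenerates its mount/swap units before anything is activated. The equivalent standalone task would be (a sketch):

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd_service:
        daemon_reload: true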

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Friday 17 January 2025  09:58:32 -0500 (0:00:01.357)       0:00:21.560 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=98ac5f85-d400-46a9-b09b-18049de2d04e', 'path': 'none', 'fstype': 'swap', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'present', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "swap",
    "mount_info": {
        "dump": 0,
        "fstype": "swap",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "none",
        "src": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
        "state": "present"
    },
    "name": "none",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e"
}
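
With state: present the mount module only edits /etc/fstab; it does not activate the swap itself. The values above translate to a single fstab line:

    UUID=98ac5f85-d400-46a9-b09b-18049de2d04e none swap defaults 0 0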

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Friday 17 January 2025  09:58:32 -0500 (0:00:00.544)       0:00:22.105 ******** 
skipping: [managed-node3] => (item={'src': 'UUID=98ac5f85-d400-46a9-b09b-18049de2d04e', 'path': 'none', 'fstype': 'swap', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'present', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "swap",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "none",
        "src": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
        "state": "present"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Friday 17 January 2025  09:58:32 -0500 (0:00:00.057)       0:00:22.162 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Friday 17 January 2025  09:58:33 -0500 (0:00:00.888)       0:00:23.051 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737124906.382958,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1737124902.2659435,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1737124902.2670383,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "205140862",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Friday 17 January 2025  09:58:34 -0500 (0:00:00.478)       0:00:23.529 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Friday 17 January 2025  09:58:34 -0500 (0:00:00.034)       0:00:23.563 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:44
Friday 17 January 2025  09:58:35 -0500 (0:00:01.051)       0:00:24.615 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Friday 17 January 2025  09:58:35 -0500 (0:00:00.100)       0:00:24.716 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_pools_list | length > 0"
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Friday 17 January 2025  09:58:35 -0500 (0:00:00.092)       0:00:24.809 ******** 
ok: [managed-node3] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "swap",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Friday 17 January 2025  09:58:35 -0500 (0:00:00.100)       0:00:24.909 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "swap",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "98ac5f85-d400-46a9-b09b-18049de2d04e"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "3b8f3f3c-04e0-475c-a029-4f3419b194d4"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
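
The device map above is collected by a helper module in the test suite; its fields mirror what lsblk reports. As a rough functional equivalent (an approximation, not the helper's actual implementation, and blk_info is an illustrative register name), the same inventory could be gathered ad hoc:

    - name: Collect info about the volumes (approximation of the test helper)
      command: lsblk -p --pairs -o NAME,TYPE,SIZE,FSTYPE,MOUNTPOINT,LABEL,UUID
      register: blk_info        # one KEY="value" line per device, e.g. NAME="/dev/sda" TYPE="disk" ...
      changed_when: false       # read-only query; never reports a change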

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Friday 17 January 2025  09:58:36 -0500 (0:00:00.656)       0:00:25.566 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003397",
    "end": "2025-01-17 09:58:37.010105",
    "rc": 0,
    "start": "2025-01-17 09:58:37.006708"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:07 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=3b8f3f3c-04e0-475c-a029-4f3419b194d4 /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=98ac5f85-d400-46a9-b09b-18049de2d04e none swap defaults 0 0
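
The storage role owns two lines here: the "# system_role:storage" fingerprint header and the trailing swap entry for /dev/sda's UUID; the NFS mounts and the root filesystem predate this run. A minimal sketch of the volume definition that yields such an entry, using the values echoed in the volume info earlier in this log (this mirrors what the test exercises, not a verbatim copy of tests_swap.yml):

    - name: Manage a whole disk as swap
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_volumes:
          - name: test1        # matches _storage_volumes_list above
            type: disk
            disks: ["sda"]
            fs_type: swap      # yields "UUID=... none swap defaults 0 0" in /etc/fstab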

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Friday 17 January 2025  09:58:37 -0500 (0:00:00.751)       0:00:26.317 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003562",
    "end": "2025-01-17 09:58:37.565604",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-17 09:58:37.562042"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Friday 17 January 2025  09:58:37 -0500 (0:00:00.506)       0:00:26.823 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Friday 17 January 2025  09:58:37 -0500 (0:00:00.028)       0:00:26.852 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'swap', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/sda', '_raw_device': '/dev/sda', '_mount_id': 'UUID=98ac5f85-d400-46a9-b09b-18049de2d04e', '_kernel_device': '/dev/sda', '_raw_kernel_device': '/dev/sda'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Friday 17 January 2025  09:58:37 -0500 (0:00:00.073)       0:00:26.925 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Friday 17 January 2025  09:58:37 -0500 (0:00:00.047)       0:00:26.973 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Friday 17 January 2025  09:58:37 -0500 (0:00:00.119)       0:00:27.093 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sda"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Friday 17 January 2025  09:58:37 -0500 (0:00:00.035)       0:00:27.128 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "[SWAP]",
        "storage_test_swap_expected_matches": "1"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Friday 17 January 2025  09:58:38 -0500 (0:00:00.069)       0:00:27.197 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Friday 17 January 2025  09:58:38 -0500 (0:00:00.038)       0:00:27.236 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Friday 17 January 2025  09:58:38 -0500 (0:00:00.045)       0:00:27.281 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Friday 17 January 2025  09:58:38 -0500 (0:00:00.037)       0:00:27.318 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Friday 17 January 2025  09:58:38 -0500 (0:00:00.041)       0:00:27.360 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Friday 17 January 2025  09:58:38 -0500 (0:00:00.031)       0:00:27.391 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/sda"
    ],
    "delta": "0:00:00.003596",
    "end": "2025-01-17 09:58:38.562726",
    "rc": 0,
    "start": "2025-01-17 09:58:38.559130"
}

STDOUT:

/dev/sda

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Friday 17 January 2025  09:58:38 -0500 (0:00:00.439)       0:00:27.831 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/proc/swaps"
    ],
    "delta": "0:00:00.003365",
    "end": "2025-01-17 09:58:39.009927",
    "rc": 0,
    "start": "2025-01-17 09:58:39.006562"
}

STDOUT:

Filename				Type		Size		Used		Priority
/dev/zram0                              partition	3802108		8448		100
/dev/sda                                partition	10485756	0		-2
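
The assertion that follows checks this table for exactly one entry matching the test device (storage_test_swap_expected_matches was set to "1" above). A condensed sketch of that kind of check, assuming the command result is registered as storage_test_swaps, the name unset at the end of the mount checks:

    - name: Verify swap status (sketch of the assertion reported below)
      assert:
        that:
          - "storage_test_swaps.stdout_lines | select('match', '^' ~ storage_test_device_path ~ ' ') | list | length == 1"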

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Friday 17 January 2025  09:58:39 -0500 (0:00:00.456)       0:00:28.288 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Unset facts] *************************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Friday 17 January 2025  09:58:39 -0500 (0:00:00.165)       0:00:28.454 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Friday 17 January 2025  09:58:39 -0500 (0:00:00.060)       0:00:28.514 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [
            "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e "
        ],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}
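
The id-match list above holds every fstab line beginning with the volume's _mount_id; the trailing space in the captured string is deliberate, so a shorter id cannot match as a prefix of a longer entry. A condensed sketch of building such a list with regex_findall, assuming the fstab content is registered as storage_test_fstab (the variable cleared in the namespace cleanup later):

    - name: Collect fstab lines that reference the volume (sketch)
      set_fact:
        storage_test_fstab_id_matches: >-
          {{ storage_test_fstab.stdout
             | regex_findall('^' ~ storage_test_volume._mount_id ~ ' ', multiline=True) }}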

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Friday 17 January 2025  09:58:39 -0500 (0:00:00.102)       0:00:28.617 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Friday 17 January 2025  09:58:39 -0500 (0:00:00.072)       0:00:28.689 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Friday 17 January 2025  09:58:39 -0500 (0:00:00.102)       0:00:28.792 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Friday 17 January 2025  09:58:39 -0500 (0:00:00.072)       0:00:28.865 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Friday 17 January 2025  09:58:39 -0500 (0:00:00.105)       0:00:28.971 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Friday 17 January 2025  09:58:39 -0500 (0:00:00.073)       0:00:29.044 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Friday 17 January 2025  09:58:39 -0500 (0:00:00.130)       0:00:29.174 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Friday 17 January 2025  09:58:40 -0500 (0:00:00.094)       0:00:29.268 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125909.2735782,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737125909.241578,
        "dev": 6,
        "device_type": 2048,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 559,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737125909.241578,
        "nlink": 1,
        "path": "/dev/sda",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
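
This stat is the device-node probe: isblk: true together with mimetype inode/blockdevice is what the next assertion keys on. A minimal sketch of the probe (the register name is illustrative), with follow enabled so a symlinked device path such as /dev/disk/by-uuid/* would be resolved first:

    - name: See whether the device node is present (sketch)
      stat:
        path: "{{ storage_test_volume._device }}"   # /dev/sda in this run
        follow: true
      register: storage_test_dev_node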

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Friday 17 January 2025  09:58:40 -0500 (0:00:00.486)       0:00:29.754 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Friday 17 January 2025  09:58:40 -0500 (0:00:00.046)       0:00:29.801 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Friday 17 January 2025  09:58:40 -0500 (0:00:00.089)       0:00:29.891 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Friday 17 January 2025  09:58:40 -0500 (0:00:00.054)       0:00:29.945 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Friday 17 January 2025  09:58:40 -0500 (0:00:00.046)       0:00:29.991 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Friday 17 January 2025  09:58:40 -0500 (0:00:00.040)       0:00:30.032 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Friday 17 January 2025  09:58:40 -0500 (0:00:00.047)       0:00:30.080 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Friday 17 January 2025  09:58:40 -0500 (0:00:00.038)       0:00:30.118 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Friday 17 January 2025  09:58:42 -0500 (0:00:01.603)       0:00:31.721 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Friday 17 January 2025  09:58:42 -0500 (0:00:00.026)       0:00:31.748 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Friday 17 January 2025  09:58:42 -0500 (0:00:00.026)       0:00:31.774 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Friday 17 January 2025  09:58:42 -0500 (0:00:00.074)       0:00:31.849 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Friday 17 January 2025  09:58:42 -0500 (0:00:00.035)       0:00:31.884 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Friday 17 January 2025  09:58:42 -0500 (0:00:00.037)       0:00:31.921 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Friday 17 January 2025  09:58:42 -0500 (0:00:00.040)       0:00:31.962 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Friday 17 January 2025  09:58:42 -0500 (0:00:00.037)       0:00:32.000 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Friday 17 January 2025  09:58:42 -0500 (0:00:00.040)       0:00:32.040 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Friday 17 January 2025  09:58:42 -0500 (0:00:00.082)       0:00:32.122 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Friday 17 January 2025  09:58:43 -0500 (0:00:00.076)       0:00:32.198 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Friday 17 January 2025  09:58:43 -0500 (0:00:00.075)       0:00:32.274 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Friday 17 January 2025  09:58:43 -0500 (0:00:00.079)       0:00:32.353 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Friday 17 January 2025  09:58:43 -0500 (0:00:00.072)       0:00:32.426 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Friday 17 January 2025  09:58:43 -0500 (0:00:00.039)       0:00:32.465 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Friday 17 January 2025  09:58:43 -0500 (0:00:00.037)       0:00:32.503 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Friday 17 January 2025  09:58:43 -0500 (0:00:00.037)       0:00:32.541 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Friday 17 January 2025  09:58:43 -0500 (0:00:00.036)       0:00:32.577 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Friday 17 January 2025  09:58:43 -0500 (0:00:00.041)       0:00:32.618 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Friday 17 January 2025  09:58:43 -0500 (0:00:00.054)       0:00:32.673 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Friday 17 January 2025  09:58:43 -0500 (0:00:00.042)       0:00:32.716 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Friday 17 January 2025  09:58:43 -0500 (0:00:00.033)       0:00:32.750 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Friday 17 January 2025  09:58:43 -0500 (0:00:00.040)       0:00:32.791 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Friday 17 January 2025  09:58:43 -0500 (0:00:00.034)       0:00:32.825 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Friday 17 January 2025  09:58:43 -0500 (0:00:00.050)       0:00:32.876 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Friday 17 January 2025  09:58:43 -0500 (0:00:00.084)       0:00:32.960 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Friday 17 January 2025  09:58:43 -0500 (0:00:00.107)       0:00:33.067 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Friday 17 January 2025  09:58:44 -0500 (0:00:00.128)       0:00:33.196 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Friday 17 January 2025  09:58:44 -0500 (0:00:00.085)       0:00:33.281 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Friday 17 January 2025  09:58:44 -0500 (0:00:00.126)       0:00:33.408 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Friday 17 January 2025  09:58:44 -0500 (0:00:00.107)       0:00:33.515 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Friday 17 January 2025  09:58:44 -0500 (0:00:00.120)       0:00:33.635 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Friday 17 January 2025  09:58:44 -0500 (0:00:00.138)       0:00:33.774 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Friday 17 January 2025  09:58:44 -0500 (0:00:00.078)       0:00:33.853 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Friday 17 January 2025  09:58:44 -0500 (0:00:00.038)       0:00:33.892 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Friday 17 January 2025  09:58:44 -0500 (0:00:00.041)       0:00:33.934 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Friday 17 January 2025  09:58:44 -0500 (0:00:00.039)       0:00:33.973 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Friday 17 January 2025  09:58:44 -0500 (0:00:00.037)       0:00:34.011 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Friday 17 January 2025  09:58:44 -0500 (0:00:00.034)       0:00:34.046 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Friday 17 January 2025  09:58:44 -0500 (0:00:00.034)       0:00:34.081 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Friday 17 January 2025  09:58:44 -0500 (0:00:00.033)       0:00:34.114 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Friday 17 January 2025  09:58:44 -0500 (0:00:00.033)       0:00:34.147 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Friday 17 January 2025  09:58:45 -0500 (0:00:00.045)       0:00:34.193 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Friday 17 January 2025  09:58:45 -0500 (0:00:00.053)       0:00:34.246 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Friday 17 January 2025  09:58:45 -0500 (0:00:00.063)       0:00:34.310 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Friday 17 January 2025  09:58:45 -0500 (0:00:00.037)       0:00:34.347 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Friday 17 January 2025  09:58:45 -0500 (0:00:00.038)       0:00:34.386 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Friday 17 January 2025  09:58:45 -0500 (0:00:00.055)       0:00:34.441 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Friday 17 January 2025  09:58:45 -0500 (0:00:00.068)       0:00:34.510 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Friday 17 January 2025  09:58:45 -0500 (0:00:00.078)       0:00:34.588 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Friday 17 January 2025  09:58:45 -0500 (0:00:00.083)       0:00:34.671 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Friday 17 January 2025  09:58:45 -0500 (0:00:00.132)       0:00:34.804 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Friday 17 January 2025  09:58:45 -0500 (0:00:00.088)       0:00:34.893 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Friday 17 January 2025  09:58:45 -0500 (0:00:00.068)       0:00:34.961 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Friday 17 January 2025  09:58:45 -0500 (0:00:00.053)       0:00:35.014 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Friday 17 January 2025  09:58:45 -0500 (0:00:00.034)       0:00:35.049 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Friday 17 January 2025  09:58:45 -0500 (0:00:00.052)       0:00:35.102 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Friday 17 January 2025  09:58:45 -0500 (0:00:00.035)       0:00:35.138 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Friday 17 January 2025  09:58:45 -0500 (0:00:00.034)       0:00:35.172 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Friday 17 January 2025  09:58:46 -0500 (0:00:00.036)       0:00:35.208 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Get disk to use for non-swap device] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:47
Friday 17 January 2025  09:58:46 -0500 (0:00:00.054)       0:00:35.263 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node3

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
Friday 17 January 2025  09:58:46 -0500 (0:00:00.164)       0:00:35.427 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
Friday 17 January 2025  09:58:47 -0500 (0:00:01.688)       0:00:37.116 ******** 
ok: [managed-node3] => {
    "changed": false,
    "disks": [
        "sdb"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"swap\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"ext4\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"ext4\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/zram0\" TYPE=\"disk\" SIZE=\"3893362688\" FSTYPE=\"\" LOG-SEC=\"4096\"",
        "Disk [/dev/sda] attrs [{'type': 'disk', 'size': '10737418240', 'fstype': 'swap', 'ssize': '512'}] has fstype",
        "filename [xvda2] is a partition",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions",
        "Disk [/dev/zram0] attrs [{'type': 'disk', 'size': '3893362688', 'fstype': '', 'ssize': '4096'}] size is less than requested"
    ]
}
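
The info trace spells out the selection rules: disks that already carry a filesystem (sda, now swap), disks with partitions (xvda), and disks smaller than requested (zram0) are rejected, leaving sdb. The include takes its requirements as variables; a sketch of a call asking for one disk, using the disks_needed default visible in the exit-check condition a few tasks down:

    - name: Get disk to use for non-swap device (sketch of the include)
      include_tasks: get_unused_disk.yml
      vars:
        disks_needed: 1   # assumption: mirrors "disks_needed | d(1)" in the exit check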

TASK [Debug why there are no unused disks] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20
Friday 17 January 2025  09:58:49 -0500 (0:00:01.585)       0:00:38.702 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'Unable to find unused disk' in unused_disks_return.disks",
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29
Friday 17 January 2025  09:58:49 -0500 (0:00:00.044)       0:00:38.746 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "unused_disks": [
            "sdb"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there aren't enough unused disks in the system] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34
Friday 17 January 2025  09:58:49 -0500 (0:00:00.043)       0:00:38.790 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)",
    "skip_reason": "Conditional result was False"
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39
Friday 17 January 2025  09:58:49 -0500 (0:00:00.090)       0:00:38.880 ******** 
ok: [managed-node3] => {
    "unused_disks": [
        "sdb"
    ]
}

TASK [Save non-swap disk] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:54
Friday 17 January 2025  09:58:49 -0500 (0:00:00.040)       0:00:38.920 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__non_swap_disk": "sdb"
    },
    "changed": false
}

TASK [Format second disk as ext3] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:58
Friday 17 January 2025  09:58:49 -0500 (0:00:00.051)       0:00:38.972 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Friday 17 January 2025  09:58:49 -0500 (0:00:00.100)       0:00:39.072 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Friday 17 January 2025  09:58:49 -0500 (0:00:00.094)       0:00:39.167 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Friday 17 January 2025  09:58:50 -0500 (0:00:00.088)       0:00:39.255 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
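
The loop above tries progressively more specific vars files (OS family, distribution, distribution plus version) and loads whichever exist on disk; in this run only vars/Fedora.yml is present. A simplified sketch of that pattern, assuming the loop items shown in the output (the real set_vars.yml may differ in detail):

    - name: Set platform/version specific variables
      include_vars: "{{ __vars_file }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file   # skip loop items whose vars file does not exist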

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Friday 17 January 2025  09:58:50 -0500 (0:00:00.083)       0:00:39.338 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Friday 17 January 2025  09:58:50 -0500 (0:00:00.043)       0:00:39.382 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Friday 17 January 2025  09:58:50 -0500 (0:00:00.044)       0:00:39.427 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Friday 17 January 2025  09:58:50 -0500 (0:00:00.037)       0:00:39.465 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Friday 17 January 2025  09:58:50 -0500 (0:00:00.036)       0:00:39.501 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Friday 17 January 2025  09:58:50 -0500 (0:00:00.108)       0:00:39.609 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Friday 17 January 2025  09:58:50 -0500 (0:00:00.042)       0:00:39.652 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Friday 17 January 2025  09:58:50 -0500 (0:00:00.060)       0:00:39.712 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sdb"
            ],
            "fs_type": "ext3",
            "mount_point": "none",
            "name": "test2",
            "type": "disk"
        }
    ]
}
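
This storage_volumes value is what the test play passed to the role. Reconstructed as an invocation, it would look roughly like the following sketch; the disks value is the unused_disks fact set earlier:

    - name: Format second disk as ext3
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_volumes:
          - name: test2
            type: disk
            disks: "{{ unused_disks }}"   # resolves to ['sdb'] in this run
            fs_type: ext3
            mount_point: none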

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Friday 17 January 2025  09:58:50 -0500 (0:00:00.042)       0:00:39.755 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Friday 17 January 2025  09:58:50 -0500 (0:00:00.046)       0:00:39.801 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Friday 17 January 2025  09:58:50 -0500 (0:00:00.068)       0:00:39.869 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Friday 17 January 2025  09:58:50 -0500 (0:00:00.068)       0:00:39.938 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Friday 17 January 2025  09:58:50 -0500 (0:00:00.050)       0:00:39.989 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Friday 17 January 2025  09:58:50 -0500 (0:00:00.186)       0:00:40.175 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Friday 17 January 2025  09:58:51 -0500 (0:00:00.069)       0:00:40.245 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sdb",
            "fs_type": "ext3"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sdb",
            "_kernel_device": "/dev/sdb",
            "_mount_id": "UUID=dd89b050-dae2-4398-a070-585dc37b7eff",
            "_raw_device": "/dev/sdb",
            "_raw_kernel_device": "/dev/sdb",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sdb"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext3",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "none",
            "mount_user": null,
            "name": "test2",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}
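
The single "create format" action means blivet wrote an ext3 signature to /dev/sdb and changed nothing else. An illustrative follow-up check, not part of the role, that would confirm the new filesystem type and UUID:

    - name: Confirm the new filesystem signature on /dev/sdb (illustrative)
      command: lsblk --noheadings -o NAME,FSTYPE,UUID /dev/sdb
      changed_when: false   # read-only query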

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Friday 17 January 2025  09:58:55 -0500 (0:00:04.838)       0:00:45.083 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}
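
As the false_condition shows, this workaround is opt-in: it defaults to false and runs only when the caller sets the role variable, e.g.

    storage_udevadm_trigger: true   # enable the extra udev trigger on platforms that need it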

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Friday 17 January 2025  09:58:55 -0500 (0:00:00.045)       0:00:45.129 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125912.85259,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "7fb8ba9557e801a2d76ed4e689ee2936c0a8def2",
        "ctime": 1737125912.8515902,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 281544,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737125912.8515902,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1431,
        "uid": 0,
        "version": "2570911275",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Friday 17 January 2025  09:58:56 -0500 (0:00:00.434)       0:00:45.563 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Friday 17 January 2025  09:58:56 -0500 (0:00:00.414)       0:00:45.978 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Friday 17 January 2025  09:58:56 -0500 (0:00:00.026)       0:00:46.004 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sdb",
                "fs_type": "ext3"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sdb",
                "_kernel_device": "/dev/sdb",
                "_mount_id": "UUID=dd89b050-dae2-4398-a070-585dc37b7eff",
                "_raw_device": "/dev/sdb",
                "_raw_kernel_device": "/dev/sdb",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sdb"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext3",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "none",
                "mount_user": null,
                "name": "test2",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Friday 17 January 2025  09:58:56 -0500 (0:00:00.045)       0:00:46.050 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Friday 17 January 2025  09:58:56 -0500 (0:00:00.043)       0:00:46.093 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sdb",
                "_kernel_device": "/dev/sdb",
                "_mount_id": "UUID=dd89b050-dae2-4398-a070-585dc37b7eff",
                "_raw_device": "/dev/sdb",
                "_raw_kernel_device": "/dev/sdb",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sdb"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext3",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "none",
                "mount_user": null,
                "name": "test2",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Friday 17 January 2025  09:58:56 -0500 (0:00:00.045)       0:00:46.139 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Friday 17 January 2025  09:58:57 -0500 (0:00:00.075)       0:00:46.214 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Friday 17 January 2025  09:58:57 -0500 (0:00:00.024)       0:00:46.239 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Friday 17 January 2025  09:58:57 -0500 (0:00:00.053)       0:00:46.293 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Friday 17 January 2025  09:58:57 -0500 (0:00:00.049)       0:00:46.342 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Friday 17 January 2025  09:58:57 -0500 (0:00:00.024)       0:00:46.367 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737124906.382958,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1737124902.2659435,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1737124902.2670383,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "205140862",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Friday 17 January 2025  09:58:57 -0500 (0:00:00.438)       0:00:46.805 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Friday 17 January 2025  09:58:57 -0500 (0:00:00.043)       0:00:46.849 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:69
Friday 17 January 2025  09:58:58 -0500 (0:00:00.980)       0:00:47.829 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Friday 17 January 2025  09:58:58 -0500 (0:00:00.062)       0:00:47.892 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_pools_list | length > 0"
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Friday 17 January 2025  09:58:58 -0500 (0:00:00.067)       0:00:47.960 ******** 
ok: [managed-node3] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/sdb",
            "_kernel_device": "/dev/sdb",
            "_mount_id": "UUID=dd89b050-dae2-4398-a070-585dc37b7eff",
            "_raw_device": "/dev/sdb",
            "_raw_kernel_device": "/dev/sdb",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sdb"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext3",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "none",
            "mount_user": null,
            "name": "test2",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Friday 17 January 2025  09:58:58 -0500 (0:00:00.075)       0:00:48.036 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "swap",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "98ac5f85-d400-46a9-b09b-18049de2d04e"
        },
        "/dev/sdb": {
            "fstype": "ext3",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "dd89b050-dae2-4398-a070-585dc37b7eff"
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "3b8f3f3c-04e0-475c-a029-4f3419b194d4"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
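
This info map mirrors what lsblk reports per device; note "[SWAP]" as the mountpoint of active swap devices (sda and zram0) and the fresh ext3 UUID on sdb. A roughly equivalent manual collection step, for illustration only:

    - name: Collect volume info by hand (roughly equivalent to the task above)
      command: lsblk --noheadings -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      changed_when: false   # read-only query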

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Friday 17 January 2025  09:58:59 -0500 (0:00:00.422)       0:00:48.458 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003875",
    "end": "2025-01-17 09:58:59.608899",
    "rc": 0,
    "start": "2025-01-17 09:58:59.605024"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:07 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=3b8f3f3c-04e0-475c-a029-4f3419b194d4 /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=98ac5f85-d400-46a9-b09b-18049de2d04e none swap defaults 0 0
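
Two lines here matter for the verification that follows: the leading "# system_role:storage" fingerprint added by the role's fingerprint task above, and the trailing swap entry for the volume created earlier in this test. An illustrative standalone check for the fingerprint (the register name is hypothetical):

    - name: Verify the role fingerprint in /etc/fstab (illustrative)
      command: grep -c '^# system_role:storage$' /etc/fstab
      register: __fstab_fingerprint   # hypothetical register name
      changed_when: false
      failed_when: __fstab_fingerprint.stdout | int != 1   # expect exactly one fingerprint line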

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Friday 17 January 2025  09:58:59 -0500 (0:00:00.400)       0:00:48.858 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003365",
    "end": "2025-01-17 09:59:00.007825",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-17 09:59:00.004460"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Friday 17 January 2025  09:59:00 -0500 (0:00:00.440)       0:00:49.299 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Friday 17 January 2025  09:59:00 -0500 (0:00:00.055)       0:00:49.355 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext3', 'mount_options': 'defaults', 'mount_point': 'none', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sdb'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/sdb', '_raw_device': '/dev/sdb', '_mount_id': 'UUID=dd89b050-dae2-4398-a070-585dc37b7eff', '_kernel_device': '/dev/sdb', '_raw_kernel_device': '/dev/sdb'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Friday 17 January 2025  09:59:00 -0500 (0:00:00.117)       0:00:49.472 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Friday 17 January 2025  09:59:00 -0500 (0:00:00.075)       0:00:49.548 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Friday 17 January 2025  09:59:00 -0500 (0:00:00.277)       0:00:49.825 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sdb"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Friday 17 January 2025  09:59:00 -0500 (0:00:00.037)       0:00:49.862 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Friday 17 January 2025  09:59:00 -0500 (0:00:00.062)       0:00:49.924 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Friday 17 January 2025  09:59:00 -0500 (0:00:00.026)       0:00:49.951 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Friday 17 January 2025  09:59:00 -0500 (0:00:00.032)       0:00:49.983 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Friday 17 January 2025  09:59:00 -0500 (0:00:00.021)       0:00:50.005 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Friday 17 January 2025  09:59:00 -0500 (0:00:00.021)       0:00:50.027 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Friday 17 January 2025  09:59:00 -0500 (0:00:00.021)       0:00:50.049 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Friday 17 January 2025  09:59:00 -0500 (0:00:00.022)       0:00:50.071 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Friday 17 January 2025  09:59:00 -0500 (0:00:00.024)       0:00:50.095 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}
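
All three swap checks are skipped because test2 is ext3, not swap. When the volume under test is swap-formatted, the gathering step amounts to reading /proc/swaps; a sketch of that skipped branch, with the register name inferred from the Unset facts task below:

    - name: Gather swap info (sketch of the skipped branch)
      command: cat /proc/swaps
      register: storage_test_swaps   # cleared again by the Unset facts task
      changed_when: false
      when: storage_test_volume.fs_type == "swap"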

TASK [Unset facts] *************************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Friday 17 January 2025  09:59:00 -0500 (0:00:00.022)       0:00:50.118 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Friday 17 January 2025  09:59:00 -0500 (0:00:00.023)       0:00:50.142 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Friday 17 January 2025  09:59:01 -0500 (0:00:00.072)       0:00:50.214 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Friday 17 January 2025  09:59:01 -0500 (0:00:00.069)       0:00:50.284 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Friday 17 January 2025  09:59:01 -0500 (0:00:00.096)       0:00:50.381 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Friday 17 January 2025  09:59:01 -0500 (0:00:00.074)       0:00:50.455 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Friday 17 January 2025  09:59:01 -0500 (0:00:00.063)       0:00:50.519 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Friday 17 January 2025  09:59:01 -0500 (0:00:00.029)       0:00:50.548 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Friday 17 January 2025  09:59:01 -0500 (0:00:00.061)       0:00:50.610 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Friday 17 January 2025  09:59:01 -0500 (0:00:00.050)       0:00:50.661 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125935.7676668,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737125935.7676668,
        "dev": 6,
        "device_type": 2064,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 572,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737125935.7676668,
        "nlink": 1,
        "path": "/dev/sdb",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Friday 17 January 2025  09:59:01 -0500 (0:00:00.429)       0:00:51.090 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Friday 17 January 2025  09:59:01 -0500 (0:00:00.035)       0:00:51.126 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Friday 17 January 2025  09:59:01 -0500 (0:00:00.027)       0:00:51.153 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Friday 17 January 2025  09:59:01 -0500 (0:00:00.029)       0:00:51.182 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Friday 17 January 2025  09:59:02 -0500 (0:00:00.026)       0:00:51.209 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Friday 17 January 2025  09:59:02 -0500 (0:00:00.023)       0:00:51.233 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Friday 17 January 2025  09:59:02 -0500 (0:00:00.033)       0:00:51.266 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Friday 17 January 2025  09:59:02 -0500 (0:00:00.025)       0:00:51.291 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Friday 17 January 2025  09:59:03 -0500 (0:00:01.625)       0:00:52.916 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Friday 17 January 2025  09:59:03 -0500 (0:00:00.034)       0:00:52.951 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Friday 17 January 2025  09:59:03 -0500 (0:00:00.022)       0:00:52.974 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Friday 17 January 2025  09:59:03 -0500 (0:00:00.058)       0:00:53.033 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Friday 17 January 2025  09:59:03 -0500 (0:00:00.056)       0:00:53.089 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Friday 17 January 2025  09:59:03 -0500 (0:00:00.065)       0:00:53.154 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Friday 17 January 2025  09:59:04 -0500 (0:00:00.045)       0:00:53.199 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Friday 17 January 2025  09:59:04 -0500 (0:00:00.045)       0:00:53.245 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Friday 17 January 2025  09:59:04 -0500 (0:00:00.048)       0:00:53.293 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Friday 17 January 2025  09:59:04 -0500 (0:00:00.086)       0:00:53.380 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Friday 17 January 2025  09:59:04 -0500 (0:00:00.082)       0:00:53.462 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Friday 17 January 2025  09:59:04 -0500 (0:00:00.079)       0:00:53.542 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Friday 17 January 2025  09:59:04 -0500 (0:00:00.073)       0:00:53.615 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Friday 17 January 2025  09:59:04 -0500 (0:00:00.066)       0:00:53.681 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Friday 17 January 2025  09:59:04 -0500 (0:00:00.040)       0:00:53.722 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Friday 17 January 2025  09:59:04 -0500 (0:00:00.038)       0:00:53.760 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Friday 17 January 2025  09:59:04 -0500 (0:00:00.037)       0:00:53.798 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Friday 17 January 2025  09:59:04 -0500 (0:00:00.041)       0:00:53.839 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Friday 17 January 2025  09:59:04 -0500 (0:00:00.037)       0:00:53.876 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Friday 17 January 2025  09:59:04 -0500 (0:00:00.038)       0:00:53.915 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Friday 17 January 2025  09:59:04 -0500 (0:00:00.049)       0:00:53.964 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Friday 17 January 2025  09:59:04 -0500 (0:00:00.065)       0:00:54.030 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Friday 17 January 2025  09:59:04 -0500 (0:00:00.035)       0:00:54.065 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Friday 17 January 2025  09:59:04 -0500 (0:00:00.038)       0:00:54.104 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Friday 17 January 2025  09:59:04 -0500 (0:00:00.036)       0:00:54.141 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Friday 17 January 2025  09:59:05 -0500 (0:00:00.065)       0:00:54.206 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Friday 17 January 2025  09:59:05 -0500 (0:00:00.068)       0:00:54.275 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Friday 17 January 2025  09:59:05 -0500 (0:00:00.065)       0:00:54.340 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Friday 17 January 2025  09:59:05 -0500 (0:00:00.043)       0:00:54.383 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Friday 17 January 2025  09:59:05 -0500 (0:00:00.075)       0:00:54.459 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Friday 17 January 2025  09:59:05 -0500 (0:00:00.067)       0:00:54.526 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Friday 17 January 2025  09:59:05 -0500 (0:00:00.075)       0:00:54.601 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Friday 17 January 2025  09:59:05 -0500 (0:00:00.105)       0:00:54.707 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Friday 17 January 2025  09:59:05 -0500 (0:00:00.084)       0:00:54.792 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Friday 17 January 2025  09:59:05 -0500 (0:00:00.046)       0:00:54.839 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Friday 17 January 2025  09:59:05 -0500 (0:00:00.111)       0:00:54.950 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Friday 17 January 2025  09:59:05 -0500 (0:00:00.046)       0:00:54.997 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Friday 17 January 2025  09:59:05 -0500 (0:00:00.038)       0:00:55.036 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Friday 17 January 2025  09:59:05 -0500 (0:00:00.065)       0:00:55.102 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Friday 17 January 2025  09:59:05 -0500 (0:00:00.038)       0:00:55.140 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Friday 17 January 2025  09:59:05 -0500 (0:00:00.039)       0:00:55.180 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Friday 17 January 2025  09:59:06 -0500 (0:00:00.040)       0:00:55.221 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Friday 17 January 2025  09:59:06 -0500 (0:00:00.039)       0:00:55.260 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Friday 17 January 2025  09:59:06 -0500 (0:00:00.036)       0:00:55.297 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Friday 17 January 2025  09:59:06 -0500 (0:00:00.035)       0:00:55.333 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Friday 17 January 2025  09:59:06 -0500 (0:00:00.035)       0:00:55.368 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Friday 17 January 2025  09:59:06 -0500 (0:00:00.054)       0:00:55.422 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Friday 17 January 2025  09:59:06 -0500 (0:00:00.069)       0:00:55.492 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Friday 17 January 2025  09:59:06 -0500 (0:00:00.063)       0:00:55.555 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Friday 17 January 2025  09:59:06 -0500 (0:00:00.041)       0:00:55.597 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Friday 17 January 2025  09:59:06 -0500 (0:00:00.040)       0:00:55.637 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}
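
Every size-verification step above is gated on storage_test_volume.type ==
"lvm" or on storage_test_volume.thin. For this type=disk volume they are all
skipped, which is why storage_test_actual_size ends up holding the registered
skip result rather than a size, and why storage_test_expected_size is reported
as undefined by both "Show expected size" debug tasks: nothing ever sets it on
this code path.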

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Friday 17 January 2025  09:59:06 -0500 (0:00:00.098)       0:00:55.736 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Friday 17 January 2025  09:59:06 -0500 (0:00:00.055)       0:00:55.791 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Friday 17 January 2025  09:59:06 -0500 (0:00:00.038)       0:00:55.830 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Friday 17 January 2025  09:59:06 -0500 (0:00:00.054)       0:00:55.885 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Friday 17 January 2025  09:59:06 -0500 (0:00:00.037)       0:00:55.923 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Friday 17 January 2025  09:59:06 -0500 (0:00:00.064)       0:00:55.987 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Friday 17 January 2025  09:59:06 -0500 (0:00:00.051)       0:00:56.038 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Friday 17 January 2025  09:59:06 -0500 (0:00:00.045)       0:00:56.083 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Friday 17 January 2025  09:59:06 -0500 (0:00:00.082)       0:00:56.166 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Change the disk device file system type from swap to ext3] ***************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:72
Friday 17 January 2025  09:59:07 -0500 (0:00:00.079)       0:00:56.245 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Friday 17 January 2025  09:59:07 -0500 (0:00:00.192)       0:00:56.437 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Friday 17 January 2025  09:59:07 -0500 (0:00:00.107)       0:00:56.545 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Friday 17 January 2025  09:59:07 -0500 (0:00:00.116)       0:00:56.662 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
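
Note the unrendered Jinja item in blivet_package_list: the conditional between
libblockdev-s390 and libblockdev is stored as a template string in
vars/Fedora.yml and is only evaluated later, when the package list is actually
used in a task.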

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Friday 17 January 2025  09:59:07 -0500 (0:00:00.126)       0:00:56.788 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Friday 17 January 2025  09:59:07 -0500 (0:00:00.078)       0:00:56.866 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Friday 17 January 2025  09:59:07 -0500 (0:00:00.070)       0:00:56.936 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Friday 17 January 2025  09:59:07 -0500 (0:00:00.076)       0:00:57.013 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Friday 17 January 2025  09:59:07 -0500 (0:00:00.044)       0:00:57.057 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Friday 17 January 2025  09:59:07 -0500 (0:00:00.107)       0:00:57.165 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Friday 17 January 2025  09:59:08 -0500 (0:00:00.040)       0:00:57.205 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Friday 17 January 2025  09:59:08 -0500 (0:00:00.055)       0:00:57.261 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "fs_type": "ext3",
            "mount_point": "/opt/test",
            "name": "test1",
            "type": "disk"
        }
    ]
}
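
These are the fully populated volume settings; the invoking task at
tests_swap.yml:72 presumably supplies only the user-facing subset, roughly
along these lines (a sketch; the role fills in the remaining defaults shown
above):

    - name: Change the disk device file system type from swap to ext3
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_volumes:
          - name: test1
            type: disk
            disks:
              - sda
            fs_type: ext3
            mount_point: /opt/test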

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Friday 17 January 2025  09:59:08 -0500 (0:00:00.035)       0:00:57.296 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Friday 17 January 2025  09:59:08 -0500 (0:00:00.032)       0:00:57.328 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Friday 17 January 2025  09:59:08 -0500 (0:00:00.030)       0:00:57.359 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Friday 17 January 2025  09:59:08 -0500 (0:00:00.028)       0:00:57.388 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Friday 17 January 2025  09:59:08 -0500 (0:00:00.028)       0:00:57.416 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Friday 17 January 2025  09:59:08 -0500 (0:00:00.064)       0:00:57.481 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Friday 17 January 2025  09:59:08 -0500 (0:00:00.021)       0:00:57.503 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "swap"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "ext3"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "fstype": "swap",
            "path": "none",
            "src": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "ext3",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test",
            "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
            "state": "mounted"
        }
    ],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext3",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "/opt/test",
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}
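
The two actions amount to wiping the swap signature from /dev/sda and creating
an ext3 filesystem in its place; the mounts list records the corresponding
/etc/fstab edits, which the role applies a few tasks later. The net fstab
change is the removal of the old swap line and the addition of:

    UUID=43d330a0-945a-4a80-983d-e14ce9cb7665 /opt/test ext3 defaults 0 0

(as confirmed by the fstab dump in the verification section below). The
packages list notes that e2fsprogs is required to manage the new ext3
filesystem.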

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Friday 17 January 2025  09:59:13 -0500 (0:00:04.719)       0:01:02.222 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Friday 17 January 2025  09:59:13 -0500 (0:00:00.069)       0:01:02.292 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125912.85259,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "7fb8ba9557e801a2d76ed4e689ee2936c0a8def2",
        "ctime": 1737125912.8515902,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 281544,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737125912.8515902,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1431,
        "uid": 0,
        "version": "2570911275",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Friday 17 January 2025  09:59:13 -0500 (0:00:00.457)       0:01:02.750 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}
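
The fingerprint is a marker comment, visible later in the fstab dump as:

    # system_role:storage

It records that the role manages entries in this file. It was already present
from the earlier role run, so nothing changed and no backup was taken (backup
is empty).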

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Friday 17 January 2025  09:59:14 -0500 (0:00:00.439)       0:01:03.189 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Friday 17 January 2025  09:59:14 -0500 (0:00:00.028)       0:01:03.218 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "swap"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "ext3"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "fstype": "swap",
                "path": "none",
                "src": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "ext3",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test",
                "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
                "state": "mounted"
            }
        ],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext3",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "/opt/test",
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Friday 17 January 2025  09:59:14 -0500 (0:00:00.034)       0:01:03.253 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Friday 17 January 2025  09:59:14 -0500 (0:00:00.027)       0:01:03.280 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext3",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "/opt/test",
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Friday 17 January 2025  09:59:14 -0500 (0:00:00.028)       0:01:03.309 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=98ac5f85-d400-46a9-b09b-18049de2d04e', 'path': 'none', 'state': 'absent', 'fstype': 'swap'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "swap",
    "mount_info": {
        "fstype": "swap",
        "path": "none",
        "src": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e",
        "state": "absent"
    },
    "name": "none",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=98ac5f85-d400-46a9-b09b-18049de2d04e"
}
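
The stale swap line is removed with the ansible.posix.mount module (note the
module redirect messages above), looping over blivet_output's mounts entries
with the loop variable mount_info. A sketch of the task shape, with parameter
names taken from the output above (the actual task at main-blivet.yml:148 may
differ):

    - name: Remove obsolete mounts
      ansible.posix.mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        state: absent
      loop: "{{ blivet_output.mounts | selectattr('state', 'eq', 'absent') | list }}"
      loop_control:
        loop_var: mount_info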

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Friday 17 January 2025  09:59:14 -0500 (0:00:00.460)       0:01:03.769 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
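
Both "Tell systemd to refresh its view of /etc/fstab" tasks are, in effect, a
systemd daemon reload, so that the mount units systemd generates from fstab are
regenerated before and after the mount changes; a minimal sketch:

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd:
        daemon_reload: true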

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Friday 17 January 2025  09:59:15 -0500 (0:00:00.938)       0:01:04.708 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=43d330a0-945a-4a80-983d-e14ce9cb7665', 'path': '/opt/test', 'fstype': 'ext3', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext3",
    "mount_info": {
        "dump": 0,
        "fstype": "ext3",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test",
        "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
        "state": "mounted"
    },
    "name": "/opt/test",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Friday 17 January 2025  09:59:16 -0500 (0:00:00.493)       0:01:05.201 ******** 
skipping: [managed-node3] => (item={'src': 'UUID=43d330a0-945a-4a80-983d-e14ce9cb7665', 'path': '/opt/test', 'fstype': 'ext3', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "ext3",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test",
        "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Friday 17 January 2025  09:59:16 -0500 (0:00:00.069)       0:01:05.271 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Friday 17 January 2025  09:59:16 -0500 (0:00:00.911)       0:01:06.182 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737124906.382958,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1737124902.2659435,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1737124902.2670383,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "205140862",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Friday 17 January 2025  09:59:17 -0500 (0:00:00.547)       0:01:06.730 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Friday 17 January 2025  09:59:17 -0500 (0:00:00.065)       0:01:06.795 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:83
Friday 17 January 2025  09:59:18 -0500 (0:00:01.162)       0:01:07.957 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Friday 17 January 2025  09:59:18 -0500 (0:00:00.053)       0:01:08.011 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_pools_list | length > 0"
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Friday 17 January 2025  09:59:18 -0500 (0:00:00.046)       0:01:08.057 ******** 
ok: [managed-node3] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext3",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "/opt/test",
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Friday 17 January 2025  09:59:18 -0500 (0:00:00.058)       0:01:08.116 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "ext3",
            "label": "",
            "mountpoint": "/opt/test",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "43d330a0-945a-4a80-983d-e14ce9cb7665"
        },
        "/dev/sdb": {
            "fstype": "ext3",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "dd89b050-dae2-4398-a070-585dc37b7eff"
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "3b8f3f3c-04e0-475c-a029-4f3419b194d4"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
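
The info map mirrors lsblk-style fields (name, fstype, label, mountpoint,
size, type, uuid) for every block device, and shows /dev/sda now carrying the
ext3 filesystem mounted at /opt/test. One way to gather equivalent data (the
test itself may use a helper module or script):

    - name: Collect info about the volumes.
      ansible.builtin.command: lsblk -p -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID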

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Friday 17 January 2025  09:59:19 -0500 (0:00:00.456)       0:01:08.572 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.005023",
    "end": "2025-01-17 09:59:20.801075",
    "rc": 0,
    "start": "2025-01-17 09:59:19.796052"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:07 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=3b8f3f3c-04e0-475c-a029-4f3419b194d4 /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=43d330a0-945a-4a80-983d-e14ce9cb7665 /opt/test ext3 defaults 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Friday 17 January 2025  09:59:20 -0500 (0:00:01.509)       0:01:10.081 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003239",
    "end": "2025-01-17 09:59:21.277099",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-17 09:59:21.273860"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Friday 17 January 2025  09:59:21 -0500 (0:00:00.463)       0:01:10.545 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Friday 17 January 2025  09:59:21 -0500 (0:00:00.035)       0:01:10.581 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext3', 'mount_options': 'defaults', 'mount_point': '/opt/test', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/sda', '_raw_device': '/dev/sda', '_mount_id': 'UUID=43d330a0-945a-4a80-983d-e14ce9cb7665', '_kernel_device': '/dev/sda', '_raw_kernel_device': '/dev/sda'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Friday 17 January 2025  09:59:21 -0500 (0:00:00.118)       0:01:10.700 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Friday 17 January 2025  09:59:21 -0500 (0:00:00.080)       0:01:10.781 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
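
These eight includes correspond one-to-one to the _storage_volume_tests list set just above, with the loop variable storage_test_volume_subset left unrendered in the task name. A sketch of the driving task; the exact loop syntax is an assumption:

- name: Run test verify for {{ storage_test_volume_subset }}
  include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset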

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Friday 17 January 2025  09:59:21 -0500 (0:00:00.192)       0:01:10.973 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sda"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Friday 17 January 2025  09:59:21 -0500 (0:00:00.056)       0:01:11.029 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Friday 17 January 2025  09:59:21 -0500 (0:00:00.072)       0:01:11.102 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Friday 17 January 2025  09:59:21 -0500 (0:00:00.062)       0:01:11.165 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Friday 17 January 2025  09:59:22 -0500 (0:00:00.081)       0:01:11.247 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Friday 17 January 2025  09:59:22 -0500 (0:00:00.062)       0:01:11.309 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Friday 17 January 2025  09:59:22 -0500 (0:00:00.036)       0:01:11.346 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Friday 17 January 2025  09:59:22 -0500 (0:00:00.045)       0:01:11.391 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Friday 17 January 2025  09:59:22 -0500 (0:00:00.044)       0:01:11.435 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Friday 17 January 2025  09:59:22 -0500 (0:00:00.044)       0:01:11.480 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Friday 17 January 2025  09:59:22 -0500 (0:00:00.043)       0:01:11.523 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Friday 17 January 2025  09:59:22 -0500 (0:00:00.054)       0:01:11.578 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test ext3 defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test "
        ]
    },
    "changed": false
}
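
The match lists above (a UUID hit with a trailing space, and mount-point hits padded with spaces) look like regex matches against the registered fstab contents. A plausible construction, assuming regex_findall over storage_test_fstab.stdout; the exact patterns are assumptions:

- name: Set some variables for fstab checking (sketch; patterns assumed)
  set_fact:
    storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall('^' ~ storage_test_volume._mount_id ~ ' ', multiline=True) }}"
    storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout | regex_findall(' ' ~ storage_test_volume.mount_point ~ ' ', multiline=True) }}"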

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Friday 17 January 2025  09:59:22 -0500 (0:00:00.106)       0:01:11.684 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
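
"All assertions passed" here means the number of identifier matches equaled the expected count set above ("1"). A sketch of that check, assuming a simple length comparison:

- name: Verify that the device identifier appears in /etc/fstab (sketch)
  assert:
    that: storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int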

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Friday 17 January 2025  09:59:22 -0500 (0:00:00.073)       0:01:11.757 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Friday 17 January 2025  09:59:22 -0500 (0:00:00.086)       0:01:11.844 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Friday 17 January 2025  09:59:22 -0500 (0:00:00.105)       0:01:11.950 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Friday 17 January 2025  09:59:22 -0500 (0:00:00.082)       0:01:12.033 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Friday 17 January 2025  09:59:22 -0500 (0:00:00.041)       0:01:12.075 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Friday 17 January 2025  09:59:23 -0500 (0:00:00.136)       0:01:12.211 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Friday 17 January 2025  09:59:23 -0500 (0:00:00.153)       0:01:12.365 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125952.8907242,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737125952.8907242,
        "dev": 6,
        "device_type": 2048,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 559,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737125952.8907242,
        "nlink": 1,
        "path": "/dev/sda",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
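
The stat result above (isblk: true, mimetype inode/blockdevice) confirms /dev/sda exists as a block device node. A sketch of the probing task, assuming follow is enabled to resolve /dev symlinks; the register name is hypothetical:

- name: See whether the device node is present (sketch)
  stat:
    path: "{{ storage_test_volume._device }}"
    follow: true
  register: storage_test_dev  # hypothetical register name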

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Friday 17 January 2025  09:59:23 -0500 (0:00:00.537)       0:01:12.903 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Friday 17 January 2025  09:59:23 -0500 (0:00:00.083)       0:01:12.987 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Friday 17 January 2025  09:59:23 -0500 (0:00:00.059)       0:01:13.046 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Friday 17 January 2025  09:59:23 -0500 (0:00:00.061)       0:01:13.108 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Friday 17 January 2025  09:59:23 -0500 (0:00:00.048)       0:01:13.157 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Friday 17 January 2025  09:59:24 -0500 (0:00:00.038)       0:01:13.196 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Friday 17 January 2025  09:59:24 -0500 (0:00:00.052)       0:01:13.249 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Friday 17 January 2025  09:59:24 -0500 (0:00:00.037)       0:01:13.286 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Friday 17 January 2025  09:59:25 -0500 (0:00:01.596)       0:01:14.883 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Friday 17 January 2025  09:59:25 -0500 (0:00:00.037)       0:01:14.920 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Friday 17 January 2025  09:59:25 -0500 (0:00:00.034)       0:01:14.955 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Friday 17 January 2025  09:59:25 -0500 (0:00:00.076)       0:01:15.031 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Friday 17 January 2025  09:59:25 -0500 (0:00:00.035)       0:01:15.067 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Friday 17 January 2025  09:59:25 -0500 (0:00:00.034)       0:01:15.102 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Friday 17 January 2025  09:59:25 -0500 (0:00:00.035)       0:01:15.137 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Friday 17 January 2025  09:59:25 -0500 (0:00:00.047)       0:01:15.184 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Friday 17 January 2025  09:59:26 -0500 (0:00:00.039)       0:01:15.224 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Friday 17 January 2025  09:59:26 -0500 (0:00:00.081)       0:01:15.306 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Friday 17 January 2025  09:59:26 -0500 (0:00:00.079)       0:01:15.385 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Friday 17 January 2025  09:59:26 -0500 (0:00:00.102)       0:01:15.488 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Friday 17 January 2025  09:59:26 -0500 (0:00:00.086)       0:01:15.574 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Friday 17 January 2025  09:59:26 -0500 (0:00:00.076)       0:01:15.651 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Friday 17 January 2025  09:59:26 -0500 (0:00:00.039)       0:01:15.690 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Friday 17 January 2025  09:59:26 -0500 (0:00:00.035)       0:01:15.726 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Friday 17 January 2025  09:59:26 -0500 (0:00:00.035)       0:01:15.761 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Friday 17 January 2025  09:59:26 -0500 (0:00:00.034)       0:01:15.795 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Friday 17 January 2025  09:59:26 -0500 (0:00:00.037)       0:01:15.833 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Friday 17 January 2025  09:59:26 -0500 (0:00:00.084)       0:01:15.917 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Friday 17 January 2025  09:59:26 -0500 (0:00:00.037)       0:01:15.955 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Friday 17 January 2025  09:59:26 -0500 (0:00:00.034)       0:01:15.989 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Friday 17 January 2025  09:59:26 -0500 (0:00:00.036)       0:01:16.026 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Friday 17 January 2025  09:59:26 -0500 (0:00:00.035)       0:01:16.062 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Friday 17 January 2025  09:59:26 -0500 (0:00:00.035)       0:01:16.097 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Friday 17 January 2025  09:59:26 -0500 (0:00:00.083)       0:01:16.181 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Friday 17 January 2025  09:59:27 -0500 (0:00:00.067)       0:01:16.249 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Friday 17 January 2025  09:59:27 -0500 (0:00:00.066)       0:01:16.316 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Friday 17 January 2025  09:59:27 -0500 (0:00:00.042)       0:01:16.358 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Friday 17 January 2025  09:59:27 -0500 (0:00:00.065)       0:01:16.424 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Friday 17 January 2025  09:59:27 -0500 (0:00:00.070)       0:01:16.495 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Friday 17 January 2025  09:59:27 -0500 (0:00:00.099)       0:01:16.595 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Friday 17 January 2025  09:59:27 -0500 (0:00:00.065)       0:01:16.661 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Friday 17 January 2025  09:59:27 -0500 (0:00:00.064)       0:01:16.725 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Friday 17 January 2025  09:59:27 -0500 (0:00:00.035)       0:01:16.761 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Friday 17 January 2025  09:59:27 -0500 (0:00:00.035)       0:01:16.796 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Friday 17 January 2025  09:59:27 -0500 (0:00:00.034)       0:01:16.830 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Friday 17 January 2025  09:59:27 -0500 (0:00:00.039)       0:01:16.869 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Friday 17 January 2025  09:59:27 -0500 (0:00:00.026)       0:01:16.896 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Friday 17 January 2025  09:59:27 -0500 (0:00:00.026)       0:01:16.922 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Friday 17 January 2025  09:59:27 -0500 (0:00:00.023)       0:01:16.946 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Friday 17 January 2025  09:59:27 -0500 (0:00:00.026)       0:01:16.973 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Friday 17 January 2025  09:59:27 -0500 (0:00:00.029)       0:01:17.002 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Friday 17 January 2025  09:59:27 -0500 (0:00:00.029)       0:01:17.031 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Friday 17 January 2025  09:59:27 -0500 (0:00:00.023)       0:01:17.055 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Friday 17 January 2025  09:59:27 -0500 (0:00:00.023)       0:01:17.079 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Friday 17 January 2025  09:59:27 -0500 (0:00:00.023)       0:01:17.102 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Friday 17 January 2025  09:59:27 -0500 (0:00:00.022)       0:01:17.124 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Friday 17 January 2025  09:59:27 -0500 (0:00:00.022)       0:01:17.147 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Friday 17 January 2025  09:59:27 -0500 (0:00:00.028)       0:01:17.175 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Friday 17 January 2025  09:59:28 -0500 (0:00:00.027)       0:01:17.203 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Friday 17 January 2025  09:59:28 -0500 (0:00:00.044)       0:01:17.247 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Friday 17 January 2025  09:59:28 -0500 (0:00:00.024)       0:01:17.271 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Friday 17 January 2025  09:59:28 -0500 (0:00:00.023)       0:01:17.294 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Friday 17 January 2025  09:59:28 -0500 (0:00:00.023)       0:01:17.318 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Friday 17 January 2025  09:59:28 -0500 (0:00:00.026)       0:01:17.345 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Friday 17 January 2025  09:59:28 -0500 (0:00:00.023)       0:01:17.369 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Friday 17 January 2025  09:59:28 -0500 (0:00:00.024)       0:01:17.393 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Friday 17 January 2025  09:59:28 -0500 (0:00:00.023)       0:01:17.417 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Friday 17 January 2025  09:59:28 -0500 (0:00:00.025)       0:01:17.442 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Repeat the previous invocation to verify idempotence] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:86
Friday 17 January 2025  09:59:28 -0500 (0:00:00.024)       0:01:17.466 ******** 
included: fedora.linux_system_roles.storage for managed-node3
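
This re-run applies the same storage_volumes spec that is echoed a little further down in the log (name test1, disk sda, ext3 on /opt/test). A sketch of the repeated invocation in tests_swap.yml; whether it uses include_role or a roles: entry is an assumption:

- name: Repeat the previous invocation to verify idempotence (sketch)
  include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_volumes:
      - name: test1
        type: disk
        disks:
          - sda
        fs_type: ext3
        mount_point: /opt/test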

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Friday 17 January 2025  09:59:28 -0500 (0:00:00.075)       0:01:17.542 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Friday 17 January 2025  09:59:28 -0500 (0:00:00.036)       0:01:17.578 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Friday 17 January 2025  09:59:28 -0500 (0:00:00.069)       0:01:17.647 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Friday 17 January 2025  09:59:28 -0500 (0:00:00.077)       0:01:17.725 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Friday 17 January 2025  09:59:28 -0500 (0:00:00.045)       0:01:17.770 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Friday 17 January 2025  09:59:28 -0500 (0:00:00.045)       0:01:17.816 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Friday 17 January 2025  09:59:28 -0500 (0:00:00.038)       0:01:17.854 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Friday 17 January 2025  09:59:28 -0500 (0:00:00.042)       0:01:17.897 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Friday 17 January 2025  09:59:28 -0500 (0:00:00.093)       0:01:17.990 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Friday 17 January 2025  09:59:28 -0500 (0:00:00.046)       0:01:18.036 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Friday 17 January 2025  09:59:28 -0500 (0:00:00.041)       0:01:18.078 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "fs_type": "ext3",
            "mount_point": "/opt/test",
            "name": "test1",
            "type": "disk"
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Friday 17 January 2025  09:59:28 -0500 (0:00:00.051)       0:01:18.130 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Friday 17 January 2025  09:59:28 -0500 (0:00:00.044)       0:01:18.174 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Friday 17 January 2025  09:59:29 -0500 (0:00:00.048)       0:01:18.222 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Friday 17 January 2025  09:59:29 -0500 (0:00:00.047)       0:01:18.270 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Friday 17 January 2025  09:59:29 -0500 (0:00:00.047)       0:01:18.317 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Friday 17 January 2025  09:59:29 -0500 (0:00:00.102)       0:01:18.419 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Friday 17 January 2025  09:59:29 -0500 (0:00:00.036)       0:01:18.456 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "ext3",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test",
            "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
            "state": "mounted"
        }
    ],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext3",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "/opt/test",
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}
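
[Editor's note] The result above shows an idempotent re-run: "actions" is empty and "changed" is false, so blivet found the ext3 volume on /dev/sda already matching the requested state. A minimal sketch of the role invocation that would request this state, reconstructed from the output alone; the actual task in tests_swap.yml may differ:

    - name: Re-apply the non-swap volume (hypothetical reconstruction)
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_volumes:
          - name: test1
            type: disk
            disks: ["sda"]
            fs_type: ext3
            mount_point: /opt/test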

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Friday 17 January 2025  09:59:30 -0500 (0:00:01.691)       0:01:20.147 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Friday 17 January 2025  09:59:31 -0500 (0:00:00.074)       0:01:20.222 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125955.9247344,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "e79bed29aedb83cd9b5e6740eccfb0323a0b2a88",
        "ctime": 1737125955.9237344,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 281544,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737125955.9237344,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "2570911275",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Friday 17 January 2025  09:59:31 -0500 (0:00:00.443)       0:01:20.666 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Friday 17 January 2025  09:59:31 -0500 (0:00:00.028)       0:01:20.694 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Friday 17 January 2025  09:59:31 -0500 (0:00:00.021)       0:01:20.715 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "ext3",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test",
                "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
                "state": "mounted"
            }
        ],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext3",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "/opt/test",
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Friday 17 January 2025  09:59:31 -0500 (0:00:00.058)       0:01:20.774 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Friday 17 January 2025  09:59:31 -0500 (0:00:00.049)       0:01:20.824 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext3",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "/opt/test",
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Friday 17 January 2025  09:59:31 -0500 (0:00:00.046)       0:01:20.871 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Friday 17 January 2025  09:59:31 -0500 (0:00:00.074)       0:01:20.946 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Friday 17 January 2025  09:59:32 -0500 (0:00:00.899)       0:01:21.845 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node3] => (item={'src': 'UUID=43d330a0-945a-4a80-983d-e14ce9cb7665', 'path': '/opt/test', 'fstype': 'ext3', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext3",
    "mount_info": {
        "dump": 0,
        "fstype": "ext3",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test",
        "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
        "state": "mounted"
    },
    "name": "/opt/test",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665"
}
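
[Editor's note] A hedged sketch of the per-item call behind the result above: the role loops ansible.builtin.mount (redirected to ansible.posix.mount) over blivet_output.mounts with loop_var mount_info. This is reconstructed from the fields shown in the output; the role's actual task may pass additional parameters.

    - name: Set up new/current mounts (hypothetical reconstruction)
      ansible.posix.mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        opts: "{{ mount_info['opts'] }}"
        state: "{{ mount_info['state'] }}"
      loop: "{{ blivet_output.mounts }}"
      loop_control:
        loop_var: mount_info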

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Friday 17 January 2025  09:59:33 -0500 (0:00:00.450)       0:01:22.296 ******** 
skipping: [managed-node3] => (item={'src': 'UUID=43d330a0-945a-4a80-983d-e14ce9cb7665', 'path': '/opt/test', 'fstype': 'ext3', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "ext3",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test",
        "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Friday 17 January 2025  09:59:33 -0500 (0:00:00.059)       0:01:22.356 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Friday 17 January 2025  09:59:34 -0500 (0:00:00.867)       0:01:23.224 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737124906.382958,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1737124902.2659435,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1737124902.2670383,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "205140862",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Friday 17 January 2025  09:59:34 -0500 (0:00:00.512)       0:01:23.736 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Friday 17 January 2025  09:59:34 -0500 (0:00:00.042)       0:01:23.779 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:97
Friday 17 January 2025  09:59:35 -0500 (0:00:01.101)       0:01:24.881 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Friday 17 January 2025  09:59:35 -0500 (0:00:00.171)       0:01:25.052 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_pools_list | length > 0"
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Friday 17 January 2025  09:59:35 -0500 (0:00:00.088)       0:01:25.140 ******** 
ok: [managed-node3] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext3",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "/opt/test",
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Friday 17 January 2025  09:59:36 -0500 (0:00:00.141)       0:01:25.281 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "ext3",
            "label": "",
            "mountpoint": "/opt/test",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "43d330a0-945a-4a80-983d-e14ce9cb7665"
        },
        "/dev/sdb": {
            "fstype": "ext3",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "dd89b050-dae2-4398-a070-585dc37b7eff"
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "3b8f3f3c-04e0-475c-a029-4f3419b194d4"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Friday 17 January 2025  09:59:36 -0500 (0:00:00.477)       0:01:25.759 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003284",
    "end": "2025-01-17 09:59:36.932539",
    "rc": 0,
    "start": "2025-01-17 09:59:36.929255"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:07 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=3b8f3f3c-04e0-475c-a029-4f3419b194d4 /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=43d330a0-945a-4a80-983d-e14ce9cb7665 /opt/test ext3 defaults 0 0
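
[Editor's note] The final line above is the entry the storage role manages; the "# system_role:storage" comment at the top of the file is the role's fingerprint. Annotated field layout of that entry (annotation only, not part of the captured log):

    # <device>                                 <mount point> <type> <options> <dump> <passno>
    UUID=43d330a0-945a-4a80-983d-e14ce9cb7665  /opt/test     ext3   defaults  0      0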

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Friday 17 January 2025  09:59:37 -0500 (0:00:00.440)       0:01:26.200 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:01.004244",
    "end": "2025-01-17 09:59:38.405393",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-17 09:59:37.401149"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Friday 17 January 2025  09:59:38 -0500 (0:00:01.470)       0:01:27.670 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Friday 17 January 2025  09:59:38 -0500 (0:00:00.032)       0:01:27.703 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext3', 'mount_options': 'defaults', 'mount_point': '/opt/test', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/sda', '_raw_device': '/dev/sda', '_mount_id': 'UUID=43d330a0-945a-4a80-983d-e14ce9cb7665', '_kernel_device': '/dev/sda', '_raw_kernel_device': '/dev/sda'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Friday 17 January 2025  09:59:38 -0500 (0:00:00.124)       0:01:27.828 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Friday 17 January 2025  09:59:38 -0500 (0:00:00.079)       0:01:27.907 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Friday 17 January 2025  09:59:38 -0500 (0:00:00.217)       0:01:28.125 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sda"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Friday 17 January 2025  09:59:38 -0500 (0:00:00.052)       0:01:28.177 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Friday 17 January 2025  09:59:39 -0500 (0:00:00.080)       0:01:28.257 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Friday 17 January 2025  09:59:39 -0500 (0:00:00.037)       0:01:28.295 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Friday 17 January 2025  09:59:39 -0500 (0:00:00.046)       0:01:28.342 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Friday 17 January 2025  09:59:39 -0500 (0:00:00.036)       0:01:28.379 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Friday 17 January 2025  09:59:39 -0500 (0:00:00.035)       0:01:28.414 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Friday 17 January 2025  09:59:39 -0500 (0:00:00.034)       0:01:28.449 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Friday 17 January 2025  09:59:39 -0500 (0:00:00.040)       0:01:28.489 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Friday 17 January 2025  09:59:39 -0500 (0:00:00.038)       0:01:28.528 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}
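
[Editor's note] Because this volume's fs_type is ext3 rather than swap, the three swap-specific tasks above are skipped; storage_test_swap_expected_matches was set to "0" earlier so the test can assert the device never shows up as active swap. A hedged sketch of what the gathering step plausibly runs when fs_type is "swap" (the command and register name are assumptions, not taken from the test source):

    - name: Gather swap info (hypothetical reconstruction)
      ansible.builtin.command: swapon --show=name --noheadings
      register: storage_test_swaps
      changed_when: false
      when: storage_test_volume.fs_type == "swap"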

TASK [Unset facts] *************************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Friday 17 January 2025  09:59:39 -0500 (0:00:00.059)       0:01:28.587 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Friday 17 January 2025  09:59:39 -0500 (0:00:00.072)       0:01:28.660 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test ext3 defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test "
        ]
    },
    "changed": false
}
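
[Editor's note] A minimal sketch of how match lists like these can be computed; the variable name storage_test_fstab and the exact pattern are assumptions for illustration, and the actual test-verify-volume-fstab.yml may differ. The trailing space in the captured id match suggests the pattern appends a field separator to the volume's _mount_id:

    - name: Set some variables for fstab checking (hypothetical reconstruction)
      set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall(storage_test_volume._mount_id ~ ' ') }}"
        storage_test_fstab_expected_id_matches: "{{ 1 if _storage_test_volume_present else 0 }}"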

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Friday 17 January 2025  09:59:39 -0500 (0:00:00.116)       0:01:28.777 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Friday 17 January 2025  09:59:39 -0500 (0:00:00.094)       0:01:28.872 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Friday 17 January 2025  09:59:39 -0500 (0:00:00.063)       0:01:28.936 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Friday 17 January 2025  09:59:39 -0500 (0:00:00.051)       0:01:28.988 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Friday 17 January 2025  09:59:39 -0500 (0:00:00.057)       0:01:29.046 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Friday 17 January 2025  09:59:39 -0500 (0:00:00.030)       0:01:29.076 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Friday 17 January 2025  09:59:39 -0500 (0:00:00.066)       0:01:29.143 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Friday 17 January 2025  09:59:40 -0500 (0:00:00.090)       0:01:29.233 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125952.8907242,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737125952.8907242,
        "dev": 6,
        "device_type": 2048,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 559,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737125952.8907242,
        "nlink": 1,
        "path": "/dev/sda",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Friday 17 January 2025  09:59:40 -0500 (0:00:00.471)       0:01:29.704 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Friday 17 January 2025  09:59:40 -0500 (0:00:00.048)       0:01:29.752 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Friday 17 January 2025  09:59:40 -0500 (0:00:00.039)       0:01:29.792 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Friday 17 January 2025  09:59:40 -0500 (0:00:00.047)       0:01:29.839 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Friday 17 January 2025  09:59:40 -0500 (0:00:00.033)       0:01:29.873 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Friday 17 January 2025  09:59:40 -0500 (0:00:00.027)       0:01:29.900 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Friday 17 January 2025  09:59:40 -0500 (0:00:00.035)       0:01:29.936 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Friday 17 January 2025  09:59:40 -0500 (0:00:00.024)       0:01:29.960 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Friday 17 January 2025  09:59:42 -0500 (0:00:01.567)       0:01:31.528 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Friday 17 January 2025  09:59:42 -0500 (0:00:00.083)       0:01:31.612 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Friday 17 January 2025  09:59:42 -0500 (0:00:00.024)       0:01:31.637 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Friday 17 January 2025  09:59:42 -0500 (0:00:00.062)       0:01:31.699 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Friday 17 January 2025  09:59:42 -0500 (0:00:00.024)       0:01:31.724 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Friday 17 January 2025  09:59:42 -0500 (0:00:00.027)       0:01:31.751 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Friday 17 January 2025  09:59:42 -0500 (0:00:00.023)       0:01:31.774 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Friday 17 January 2025  09:59:42 -0500 (0:00:00.026)       0:01:31.801 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Friday 17 January 2025  09:59:42 -0500 (0:00:00.024)       0:01:31.825 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Friday 17 January 2025  09:59:42 -0500 (0:00:00.054)       0:01:31.879 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Friday 17 January 2025  09:59:42 -0500 (0:00:00.050)       0:01:31.930 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Friday 17 January 2025  09:59:42 -0500 (0:00:00.049)       0:01:31.979 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Friday 17 January 2025  09:59:42 -0500 (0:00:00.067)       0:01:32.047 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Friday 17 January 2025  09:59:42 -0500 (0:00:00.079)       0:01:32.126 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Friday 17 January 2025  09:59:42 -0500 (0:00:00.041)       0:01:32.167 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Friday 17 January 2025  09:59:43 -0500 (0:00:00.038)       0:01:32.206 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Friday 17 January 2025  09:59:43 -0500 (0:00:00.037)       0:01:32.243 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Friday 17 January 2025  09:59:43 -0500 (0:00:00.039)       0:01:32.283 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Friday 17 January 2025  09:59:43 -0500 (0:00:00.055)       0:01:32.338 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Friday 17 January 2025  09:59:43 -0500 (0:00:00.067)       0:01:32.406 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Friday 17 January 2025  09:59:43 -0500 (0:00:00.040)       0:01:32.447 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Friday 17 January 2025  09:59:43 -0500 (0:00:00.039)       0:01:32.486 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Friday 17 January 2025  09:59:43 -0500 (0:00:00.040)       0:01:32.527 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Friday 17 January 2025  09:59:43 -0500 (0:00:00.038)       0:01:32.565 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Friday 17 January 2025  09:59:43 -0500 (0:00:00.039)       0:01:32.605 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Friday 17 January 2025  09:59:43 -0500 (0:00:00.080)       0:01:32.686 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Friday 17 January 2025  09:59:43 -0500 (0:00:00.072)       0:01:32.758 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Friday 17 January 2025  09:59:43 -0500 (0:00:00.072)       0:01:32.830 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Friday 17 January 2025  09:59:43 -0500 (0:00:00.045)       0:01:32.876 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Friday 17 January 2025  09:59:43 -0500 (0:00:00.070)       0:01:32.946 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Friday 17 January 2025  09:59:43 -0500 (0:00:00.075)       0:01:33.021 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Friday 17 January 2025  09:59:43 -0500 (0:00:00.081)       0:01:33.102 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Friday 17 January 2025  09:59:43 -0500 (0:00:00.073)       0:01:33.175 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Friday 17 January 2025  09:59:44 -0500 (0:00:00.070)       0:01:33.246 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Friday 17 January 2025  09:59:44 -0500 (0:00:00.037)       0:01:33.284 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Friday 17 January 2025  09:59:44 -0500 (0:00:00.044)       0:01:33.329 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Friday 17 January 2025  09:59:44 -0500 (0:00:00.045)       0:01:33.374 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Friday 17 January 2025  09:59:44 -0500 (0:00:00.049)       0:01:33.423 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Friday 17 January 2025  09:59:44 -0500 (0:00:00.065)       0:01:33.488 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Friday 17 January 2025  09:59:44 -0500 (0:00:00.041)       0:01:33.529 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Friday 17 January 2025  09:59:44 -0500 (0:00:00.040)       0:01:33.570 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Friday 17 January 2025  09:59:44 -0500 (0:00:00.037)       0:01:33.607 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Friday 17 January 2025  09:59:44 -0500 (0:00:00.036)       0:01:33.644 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Friday 17 January 2025  09:59:44 -0500 (0:00:00.033)       0:01:33.678 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Friday 17 January 2025  09:59:44 -0500 (0:00:00.028)       0:01:33.706 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Friday 17 January 2025  09:59:44 -0500 (0:00:00.023)       0:01:33.730 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Friday 17 January 2025  09:59:44 -0500 (0:00:00.033)       0:01:33.764 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Friday 17 January 2025  09:59:44 -0500 (0:00:00.041)       0:01:33.805 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Friday 17 January 2025  09:59:44 -0500 (0:00:00.038)       0:01:33.843 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Friday 17 January 2025  09:59:44 -0500 (0:00:00.047)       0:01:33.891 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Friday 17 January 2025  09:59:44 -0500 (0:00:00.042)       0:01:33.933 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Friday 17 January 2025  09:59:44 -0500 (0:00:00.078)       0:01:34.012 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Friday 17 January 2025  09:59:44 -0500 (0:00:00.042)       0:01:34.055 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Friday 17 January 2025  09:59:44 -0500 (0:00:00.037)       0:01:34.093 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Friday 17 January 2025  09:59:44 -0500 (0:00:00.040)       0:01:34.133 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Friday 17 January 2025  09:59:44 -0500 (0:00:00.038)       0:01:34.171 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Friday 17 January 2025  09:59:45 -0500 (0:00:00.038)       0:01:34.209 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Friday 17 January 2025  09:59:45 -0500 (0:00:00.037)       0:01:34.246 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Friday 17 January 2025  09:59:45 -0500 (0:00:00.039)       0:01:34.286 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Friday 17 January 2025  09:59:45 -0500 (0:00:00.038)       0:01:34.324 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Change it back to swap] **************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:100
Friday 17 January 2025  09:59:45 -0500 (0:00:00.040)       0:01:34.364 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Friday 17 January 2025  09:59:45 -0500 (0:00:00.198)       0:01:34.562 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Friday 17 January 2025  09:59:45 -0500 (0:00:00.097)       0:01:34.659 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Friday 17 January 2025  09:59:45 -0500 (0:00:00.244)       0:01:34.903 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}

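Fedora_40.yml is skipped twice because the role's list of candidate vars files includes both a distribution-major-version and a full-distribution-version filename, and on Fedora 40 the two render to the same name; since neither file ships with the role, only Fedora.yml is loaded.
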
TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Friday 17 January 2025  09:59:45 -0500 (0:00:00.104)       0:01:35.007 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Friday 17 January 2025  09:59:45 -0500 (0:00:00.051)       0:01:35.058 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Friday 17 January 2025  09:59:45 -0500 (0:00:00.082)       0:01:35.141 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Friday 17 January 2025  09:59:46 -0500 (0:00:00.055)       0:01:35.196 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Friday 17 January 2025  09:59:46 -0500 (0:00:00.073)       0:01:35.270 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount

included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Friday 17 January 2025  09:59:46 -0500 (0:00:00.176)       0:01:35.446 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Friday 17 January 2025  09:59:46 -0500 (0:00:00.064)       0:01:35.511 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Friday 17 January 2025  09:59:46 -0500 (0:00:00.073)       0:01:35.584 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "fs_type": "swap",
            "name": "test1",
            "type": "disk"
        }
    ]
}

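storage_pools is undefined in the previous task because this part of the test manages only unpooled volumes; the role receives the single disk-typed swap volume shown above. A minimal sketch of the invoking task ("Change it back to swap", tests_swap.yml:100), reconstructed from this output rather than quoted from the test file:

- name: Change it back to swap
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_volumes:
      - name: test1
        type: disk
        disks:
          - sda
        fs_type: swap
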
TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Friday 17 January 2025  09:59:46 -0500 (0:00:00.105)       0:01:35.689 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Friday 17 January 2025  09:59:46 -0500 (0:00:00.073)       0:01:35.762 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Friday 17 January 2025  09:59:46 -0500 (0:00:00.075)       0:01:35.838 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Friday 17 January 2025  09:59:46 -0500 (0:00:00.045)       0:01:35.883 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Friday 17 January 2025  09:59:46 -0500 (0:00:00.046)       0:01:35.929 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Friday 17 January 2025  09:59:46 -0500 (0:00:00.131)       0:01:36.061 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Friday 17 January 2025  09:59:46 -0500 (0:00:00.045)       0:01:36.107 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "ext3"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "swap"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "fstype": "ext3",
            "path": "/opt/test",
            "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
            "state": "absent"
        },
        {
            "path": "/opt/test",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "swap",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "none",
            "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
            "state": "present"
        }
    ],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "swap",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

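The two actions above replace the ext3 signature on /dev/sda with a swap signature; outside the role, the same on-disk change could be made with a single mkswap call (illustrative only, not how blivet performs it internally):

- name: Recreate /dev/sda as swap (illustrative equivalent of the two actions)
  ansible.builtin.command: mkswap /dev/sda  # overwrites the old ext3 signature with a swap header

The mounts list that accompanies the result drives the fstab edits below: the two stale /opt/test entries are removed and a swap entry keyed by the new UUID is added.
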
TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Friday 17 January 2025  09:59:48 -0500 (0:00:01.945)       0:01:38.053 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Friday 17 January 2025  09:59:48 -0500 (0:00:00.070)       0:01:38.124 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125955.9247344,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "e79bed29aedb83cd9b5e6740eccfb0323a0b2a88",
        "ctime": 1737125955.9237344,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 281544,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737125955.9237344,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "2570911275",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Friday 17 January 2025  09:59:49 -0500 (0:00:00.453)       0:01:38.578 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Friday 17 January 2025  09:59:49 -0500 (0:00:00.465)       0:01:39.043 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Friday 17 January 2025  09:59:49 -0500 (0:00:00.045)       0:01:39.090 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "ext3"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "swap"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "fstype": "ext3",
                "path": "/opt/test",
                "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
                "state": "absent"
            },
            {
                "path": "/opt/test",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "swap",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "none",
                "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
                "state": "present"
            }
        ],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "swap",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Friday 17 January 2025  09:59:50 -0500 (0:00:00.105)       0:01:39.195 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Friday 17 January 2025  09:59:50 -0500 (0:00:00.086)       0:01:39.282 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "swap",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Friday 17 January 2025  09:59:50 -0500 (0:00:00.086)       0:01:39.369 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=43d330a0-945a-4a80-983d-e14ce9cb7665', 'path': '/opt/test', 'state': 'absent', 'fstype': 'ext3'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext3",
    "mount_info": {
        "fstype": "ext3",
        "path": "/opt/test",
        "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665",
        "state": "absent"
    },
    "name": "/opt/test",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=43d330a0-945a-4a80-983d-e14ce9cb7665"
}
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node3] => (item={'path': '/opt/test', 'state': 'absent'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "mount_info": {
        "path": "/opt/test",
        "state": "absent"
    },
    "name": "/opt/test",
    "opts": "defaults",
    "passno": "0"
}

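Each loop item above is handed to ansible.posix.mount (note the module redirect); the first, changed item amounts to:

- name: Remove the obsolete /opt/test entry from /etc/fstab (sketch of the loop item)
  ansible.posix.mount:
    src: UUID=43d330a0-945a-4a80-983d-e14ce9cb7665
    path: /opt/test
    fstype: ext3
    state: absent

The second item is already absent, so it reports ok instead of changed.
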
TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Friday 17 January 2025  09:59:51 -0500 (0:00:00.940)       0:01:40.309 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

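systemd generates mount and swap units from /etc/fstab, so after editing the file the role triggers a daemon reload; the "name": null result with an empty "status" means no specific unit was acted on. Roughly equivalent to:

- name: Re-read /etc/fstab (equivalent sketch)
  ansible.builtin.systemd_service:
    daemon_reload: true
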
TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Friday 17 January 2025  09:59:51 -0500 (0:00:00.863)       0:01:41.172 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd', 'path': 'none', 'fstype': 'swap', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'present', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "swap",
    "mount_info": {
        "dump": 0,
        "fstype": "swap",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "none",
        "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
        "state": "present"
    },
    "name": "none",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd"
}

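This is the present-state counterpart of the removal above; for swap, fstab's mount-point field is the literal string none. A sketch of the call behind the loop item:

- name: Add the swap entry to /etc/fstab (sketch of the loop item)
  ansible.posix.mount:
    src: UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd
    path: none
    fstype: swap
    opts: defaults
    state: present

With state: present the module only edits /etc/fstab and mounts nothing; the active swap is verified later against /proc/swaps.
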
TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Friday 17 January 2025  09:59:52 -0500 (0:00:00.457)       0:01:41.630 ******** 
skipping: [managed-node3] => (item={'src': 'UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd', 'path': 'none', 'fstype': 'swap', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'present', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "swap",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "none",
        "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
        "state": "present"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Friday 17 January 2025  09:59:52 -0500 (0:00:00.056)       0:01:41.687 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Friday 17 January 2025  09:59:53 -0500 (0:00:00.865)       0:01:42.552 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737124906.382958,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1737124902.2659435,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1737124902.2670383,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "205140862",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Friday 17 January 2025  09:59:53 -0500 (0:00:00.473)       0:01:43.026 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Friday 17 January 2025  09:59:53 -0500 (0:00:00.036)       0:01:43.063 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:110
Friday 17 January 2025  09:59:54 -0500 (0:00:01.096)       0:01:44.159 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Friday 17 January 2025  09:59:55 -0500 (0:00:00.168)       0:01:44.328 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_pools_list | length > 0"
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Friday 17 January 2025  09:59:55 -0500 (0:00:00.137)       0:01:44.465 ******** 
ok: [managed-node3] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "swap",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Friday 17 January 2025  09:59:55 -0500 (0:00:00.154)       0:01:44.620 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "swap",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "12a09b6b-eab6-4e6d-877b-514d13a7ffdd"
        },
        "/dev/sdb": {
            "fstype": "ext3",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "dd89b050-dae2-4398-a070-585dc37b7eff"
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "3b8f3f3c-04e0-475c-a029-4f3419b194d4"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Friday 17 January 2025  09:59:55 -0500 (0:00:00.519)       0:01:45.139 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.004877",
    "end": "2025-01-17 09:59:57.331158",
    "rc": 0,
    "start": "2025-01-17 09:59:56.326281"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:07 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=3b8f3f3c-04e0-475c-a029-4f3419b194d4 /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd none swap defaults 0 0

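The last line is the entry the role just wrote: the device referenced by UUID, mount point none (swap has no mount point), type swap, default options, and 0 0 so the device is excluded from dump and boot-time fsck. The # system_role:storage header at the top is the fingerprint added by the role's "Add fingerprint to /etc/fstab" task.
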
TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Friday 17 January 2025  09:59:57 -0500 (0:00:01.458)       0:01:46.598 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003786",
    "end": "2025-01-17 09:59:57.771101",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-17 09:59:57.767315"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Friday 17 January 2025  09:59:57 -0500 (0:00:00.438)       0:01:47.037 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Friday 17 January 2025  09:59:57 -0500 (0:00:00.032)       0:01:47.070 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'swap', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/sda', '_raw_device': '/dev/sda', '_mount_id': 'UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd', '_kernel_device': '/dev/sda', '_raw_kernel_device': '/dev/sda'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Friday 17 January 2025  09:59:57 -0500 (0:00:00.116)       0:01:47.187 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Friday 17 January 2025  09:59:58 -0500 (0:00:00.081)       0:01:47.269 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Friday 17 January 2025  09:59:58 -0500 (0:00:00.213)       0:01:47.483 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sda"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Friday 17 January 2025  09:59:58 -0500 (0:00:00.047)       0:01:47.530 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "[SWAP]",
        "storage_test_swap_expected_matches": "1"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Friday 17 January 2025  09:59:58 -0500 (0:00:00.078)       0:01:47.608 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Friday 17 January 2025  09:59:58 -0500 (0:00:00.036)       0:01:47.645 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Friday 17 January 2025  09:59:58 -0500 (0:00:00.048)       0:01:47.693 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Friday 17 January 2025  09:59:58 -0500 (0:00:00.038)       0:01:47.731 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Friday 17 January 2025  09:59:58 -0500 (0:00:00.034)       0:01:47.766 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Friday 17 January 2025  09:59:58 -0500 (0:00:00.036)       0:01:47.803 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/sda"
    ],
    "delta": "0:00:01.003899",
    "end": "2025-01-17 10:00:00.012436",
    "rc": 0,
    "start": "2025-01-17 09:59:59.008537"
}

STDOUT:

/dev/sda
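
The realpath call canonicalizes the device path so that later checks compare kernel device names rather than symlinks; for a plain disk like /dev/sda the path is already canonical. A minimal sketch of such a task, assuming the result is registered as storage_test_sys_node (a register name the "Unset facts" task below does clear):

    - name: Get path of test volume device
      command: realpath {{ storage_test_device_path }}
      register: storage_test_sys_node
      changed_when: false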

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Friday 17 January 2025  10:00:00 -0500 (0:00:01.461)       0:01:49.265 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/proc/swaps"
    ],
    "delta": "0:00:00.003582",
    "end": "2025-01-17 10:00:00.426812",
    "rc": 0,
    "start": "2025-01-17 10:00:00.423230"
}

STDOUT:

Filename				Type		Size		Used		Priority
/dev/zram0                              partition	3802108		8232		100
/dev/sda                                partition	10485756	0		-2
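
The /proc/swaps listing shows the expected state: /dev/sda is active as swap alongside the pre-existing /dev/zram0 device. The passing check that follows presumably counts occurrences of the canonical device path in this output and compares the count against storage_test_swap_expected_matches ("1", set earlier). A hedged sketch of such an assertion, reusing the storage_test_swaps register name that the "Unset facts" task later clears; the exact regex is an assumption:

    - name: Verify swap status
      assert:
        that:
          - >-
            storage_test_swaps.stdout
            | regex_findall('^' ~ storage_test_sys_node.stdout ~ ' ', multiline=True)
            | length | string == storage_test_swap_expected_matches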

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Friday 17 January 2025  10:00:00 -0500 (0:00:00.429)       0:01:49.694 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Unset facts] *************************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Friday 17 January 2025  10:00:00 -0500 (0:00:00.087)       0:01:49.782 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Friday 17 January 2025  10:00:00 -0500 (0:00:00.097)       0:01:49.879 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [
            "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd "
        ],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}
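
These expectations fit a swap volume: the UUID must appear exactly once in /etc/fstab (expected_id_matches "1"), while no mount-point match is expected ("0") because swap entries use "none" as their mount point. A sketch of the shape such an assertion could take, using only the fact names set above:

    - name: Verify that the device identifier appears in /etc/fstab
      assert:
        that:
          - storage_test_fstab_id_matches | length | string == storage_test_fstab_expected_id_matches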

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Friday 17 January 2025  10:00:00 -0500 (0:00:00.109)       0:01:49.989 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Friday 17 January 2025  10:00:00 -0500 (0:00:00.079)       0:01:50.069 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Friday 17 January 2025  10:00:00 -0500 (0:00:00.080)       0:01:50.149 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Friday 17 January 2025  10:00:01 -0500 (0:00:00.067)       0:01:50.217 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Friday 17 January 2025  10:00:01 -0500 (0:00:00.078)       0:01:50.295 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Friday 17 January 2025  10:00:01 -0500 (0:00:00.031)       0:01:50.326 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Friday 17 January 2025  10:00:01 -0500 (0:00:00.059)       0:01:50.386 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Friday 17 January 2025  10:00:01 -0500 (0:00:00.075)       0:01:50.461 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125988.7268443,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737125988.697844,
        "dev": 6,
        "device_type": 2048,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 559,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737125988.697844,
        "nlink": 1,
        "path": "/dev/sda",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
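
The stat output describes a healthy block-device node: exists and isblk are true, with mode 0660 and root:disk ownership, the conventional udev-managed state for a whole-disk device. A minimal sketch of the kind of stat task that yields this output:

    - name: See whether the device node is present
      stat:
        path: /dev/sda
      register: storage_test_dev  # hypothetical register name, for illustration only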

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Friday 17 January 2025  10:00:01 -0500 (0:00:00.423)       0:01:50.885 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Friday 17 January 2025  10:00:01 -0500 (0:00:00.030)       0:01:50.916 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Friday 17 January 2025  10:00:01 -0500 (0:00:00.023)       0:01:50.940 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Friday 17 January 2025  10:00:01 -0500 (0:00:00.032)       0:01:50.973 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Friday 17 January 2025  10:00:01 -0500 (0:00:00.028)       0:01:51.001 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Friday 17 January 2025  10:00:01 -0500 (0:00:00.023)       0:01:51.025 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Friday 17 January 2025  10:00:01 -0500 (0:00:00.030)       0:01:51.055 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Friday 17 January 2025  10:00:01 -0500 (0:00:00.024)       0:01:51.079 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Friday 17 January 2025  10:00:03 -0500 (0:00:01.624)       0:01:52.704 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Friday 17 January 2025  10:00:03 -0500 (0:00:00.072)       0:01:52.777 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Friday 17 January 2025  10:00:03 -0500 (0:00:00.069)       0:01:52.847 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Friday 17 January 2025  10:00:03 -0500 (0:00:00.105)       0:01:52.952 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Friday 17 January 2025  10:00:03 -0500 (0:00:00.038)       0:01:52.991 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Friday 17 January 2025  10:00:03 -0500 (0:00:00.038)       0:01:53.029 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Friday 17 January 2025  10:00:03 -0500 (0:00:00.037)       0:01:53.067 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Friday 17 January 2025  10:00:03 -0500 (0:00:00.042)       0:01:53.110 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Friday 17 January 2025  10:00:03 -0500 (0:00:00.039)       0:01:53.150 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Friday 17 January 2025  10:00:04 -0500 (0:00:00.085)       0:01:53.235 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
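
Because the volume is unencrypted, zero crypttab entries are expected (_storage_test_expected_crypttab_entries is "0"), so the passing assertion here presumably just compares the collected entry list against that count. A sketch under that assumption:

    - name: Check for /etc/crypttab entry
      assert:
        that:
          - _storage_test_crypttab_entries | length | string == _storage_test_expected_crypttab_entries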

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Friday 17 January 2025  10:00:04 -0500 (0:00:00.105)       0:01:53.341 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Friday 17 January 2025  10:00:04 -0500 (0:00:00.072)       0:01:53.413 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Friday 17 January 2025  10:00:04 -0500 (0:00:00.075)       0:01:53.489 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Friday 17 January 2025  10:00:04 -0500 (0:00:00.081)       0:01:53.570 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Friday 17 January 2025  10:00:04 -0500 (0:00:00.046)       0:01:53.617 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Friday 17 January 2025  10:00:04 -0500 (0:00:00.058)       0:01:53.676 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Friday 17 January 2025  10:00:04 -0500 (0:00:00.040)       0:01:53.717 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Friday 17 January 2025  10:00:04 -0500 (0:00:00.038)       0:01:53.755 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Friday 17 January 2025  10:00:04 -0500 (0:00:00.035)       0:01:53.791 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Friday 17 January 2025  10:00:04 -0500 (0:00:00.041)       0:01:53.832 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Friday 17 January 2025  10:00:04 -0500 (0:00:00.039)       0:01:53.872 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Friday 17 January 2025  10:00:04 -0500 (0:00:00.038)       0:01:53.910 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Friday 17 January 2025  10:00:04 -0500 (0:00:00.039)       0:01:53.949 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Friday 17 January 2025  10:00:04 -0500 (0:00:00.038)       0:01:53.987 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Friday 17 January 2025  10:00:04 -0500 (0:00:00.038)       0:01:54.026 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Friday 17 January 2025  10:00:04 -0500 (0:00:00.078)       0:01:54.104 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Friday 17 January 2025  10:00:04 -0500 (0:00:00.069)       0:01:54.174 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Friday 17 January 2025  10:00:05 -0500 (0:00:00.055)       0:01:54.230 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
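
The "VARIABLE IS NOT DEFINED!" marker is expected here, not an error: ansible.builtin.debug prints this placeholder instead of failing when asked to display an undefined variable, and storage_test_expected_size is only set on the LVM branches of these checks, all of which are skipped for a type "disk" swap volume. The task is presumably just:

    - name: Show expected size
      debug:
        var: storage_test_expected_size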

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Friday 17 January 2025  10:00:05 -0500 (0:00:00.030)       0:01:54.260 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Friday 17 January 2025  10:00:05 -0500 (0:00:00.052)       0:01:54.312 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Friday 17 January 2025  10:00:05 -0500 (0:00:00.057)       0:01:54.370 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Friday 17 January 2025  10:00:05 -0500 (0:00:00.053)       0:01:54.424 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Friday 17 January 2025  10:00:05 -0500 (0:00:00.045)       0:01:54.469 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Friday 17 January 2025  10:00:05 -0500 (0:00:00.045)       0:01:54.515 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Friday 17 January 2025  10:00:05 -0500 (0:00:00.023)       0:01:54.539 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Friday 17 January 2025  10:00:05 -0500 (0:00:00.024)       0:01:54.563 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Friday 17 January 2025  10:00:05 -0500 (0:00:00.026)       0:01:54.590 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Friday 17 January 2025  10:00:05 -0500 (0:00:00.028)       0:01:54.619 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Friday 17 January 2025  10:00:05 -0500 (0:00:00.039)       0:01:54.658 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Friday 17 January 2025  10:00:05 -0500 (0:00:00.038)       0:01:54.696 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Friday 17 January 2025  10:00:05 -0500 (0:00:00.067)       0:01:54.764 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Friday 17 January 2025  10:00:05 -0500 (0:00:00.035)       0:01:54.799 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Friday 17 January 2025  10:00:05 -0500 (0:00:00.038)       0:01:54.837 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Friday 17 January 2025  10:00:05 -0500 (0:00:00.044)       0:01:54.882 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Friday 17 January 2025  10:00:05 -0500 (0:00:00.039)       0:01:54.921 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Friday 17 January 2025  10:00:05 -0500 (0:00:00.040)       0:01:54.962 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Friday 17 January 2025  10:00:05 -0500 (0:00:00.037)       0:01:54.999 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Friday 17 January 2025  10:00:05 -0500 (0:00:00.039)       0:01:55.040 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Friday 17 January 2025  10:00:05 -0500 (0:00:00.036)       0:01:55.076 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Friday 17 January 2025  10:00:05 -0500 (0:00:00.048)       0:01:55.124 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Friday 17 January 2025  10:00:06 -0500 (0:00:00.104)       0:01:55.229 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Friday 17 January 2025  10:00:06 -0500 (0:00:00.071)       0:01:55.300 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Friday 17 January 2025  10:00:06 -0500 (0:00:00.039)       0:01:55.340 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Friday 17 January 2025  10:00:06 -0500 (0:00:00.037)       0:01:55.377 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Friday 17 January 2025  10:00:06 -0500 (0:00:00.037)       0:01:55.415 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Friday 17 January 2025  10:00:06 -0500 (0:00:00.036)       0:01:55.452 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Friday 17 January 2025  10:00:06 -0500 (0:00:00.040)       0:01:55.492 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Friday 17 January 2025  10:00:06 -0500 (0:00:00.039)       0:01:55.531 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Friday 17 January 2025  10:00:06 -0500 (0:00:00.038)       0:01:55.570 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Friday 17 January 2025  10:00:06 -0500 (0:00:00.041)       0:01:55.612 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Repeat the previous invocation to verify idempotence] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:113
Friday 17 January 2025  10:00:06 -0500 (0:00:00.040)       0:01:55.652 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Friday 17 January 2025  10:00:06 -0500 (0:00:00.173)       0:01:55.825 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Friday 17 January 2025  10:00:06 -0500 (0:00:00.066)       0:01:55.891 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Friday 17 January 2025  10:00:06 -0500 (0:00:00.075)       0:01:55.967 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Friday 17 January 2025  10:00:06 -0500 (0:00:00.083)       0:01:56.051 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Friday 17 January 2025  10:00:06 -0500 (0:00:00.044)       0:01:56.096 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Friday 17 January 2025  10:00:06 -0500 (0:00:00.044)       0:01:56.140 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Friday 17 January 2025  10:00:06 -0500 (0:00:00.041)       0:01:56.182 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Friday 17 January 2025  10:00:07 -0500 (0:00:00.045)       0:01:56.227 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Friday 17 January 2025  10:00:07 -0500 (0:00:00.094)       0:01:56.322 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Friday 17 January 2025  10:00:07 -0500 (0:00:00.064)       0:01:56.386 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Friday 17 January 2025  10:00:07 -0500 (0:00:00.041)       0:01:56.428 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "fs_type": "swap",
            "name": "test1",
            "type": "disk"
        }
    ]
}
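
The storage_volumes value printed above is the test's entire input for this second run (storage_pools is undefined); it corresponds to a role invocation of roughly the following shape, reconstructed from the printed variable with the task name invented for illustration:

    - name: Re-apply swap configuration on the test disk
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_volumes:
          - name: test1
            type: disk
            disks:
              - sda
            fs_type: swap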

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Friday 17 January 2025  10:00:07 -0500 (0:00:00.048)       0:01:56.477 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Friday 17 January 2025  10:00:07 -0500 (0:00:00.086)       0:01:56.563 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Friday 17 January 2025  10:00:07 -0500 (0:00:00.077)       0:01:56.641 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Friday 17 January 2025  10:00:07 -0500 (0:00:00.083)       0:01:56.724 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Friday 17 January 2025  10:00:07 -0500 (0:00:00.068)       0:01:56.793 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Friday 17 January 2025  10:00:07 -0500 (0:00:00.184)       0:01:56.980 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Friday 17 January 2025  10:00:07 -0500 (0:00:00.051)       0:01:57.031 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "swap",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "none",
            "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
            "state": "present"
        }
    ],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "swap",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}
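
With "actions" empty and "changed" false, this second run confirms idempotence: blivet found the swap volume already in the requested state. The single mounts entry corresponds to an /etc/fstab line of roughly this shape (reconstructed from the src/path/fstype/opts/dump/passno fields above; a sketch, not the verbatim file contents):

    UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd none swap defaults 0 0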

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Friday 17 January 2025  10:00:09 -0500 (0:00:01.638)       0:01:58.670 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Friday 17 January 2025  10:00:09 -0500 (0:00:00.076)       0:01:58.747 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125992.3738563,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "19198d5856f71bcb1cffb4990f211795b664c2d0",
        "ctime": 1737125992.3728564,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 281544,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737125992.3728564,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1431,
        "uid": 0,
        "version": "2570911275",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Friday 17 January 2025  10:00:10 -0500 (0:00:00.531)       0:01:59.279 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Friday 17 January 2025  10:00:10 -0500 (0:00:00.052)       0:01:59.332 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Friday 17 January 2025  10:00:10 -0500 (0:00:00.036)       0:01:59.368 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "swap",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "none",
                "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
                "state": "present"
            }
        ],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "swap",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Friday 17 January 2025  10:00:10 -0500 (0:00:00.055)       0:01:59.424 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Friday 17 January 2025  10:00:10 -0500 (0:00:00.044)       0:01:59.468 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "swap",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Friday 17 January 2025  10:00:10 -0500 (0:00:00.048)       0:01:59.517 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Friday 17 January 2025  10:00:10 -0500 (0:00:00.089)       0:01:59.606 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
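
The null name and empty status in this result are consistent with a bare daemon-reload call, i.e. the module equivalent of running 'systemctl daemon-reload' after editing /etc/fstab. A minimal sketch of such a task:

    # Sketch: reload systemd units generated from /etc/fstab
    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd:
        daemon_reload: true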

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Friday 17 January 2025  10:00:11 -0500 (0:00:00.983)       0:02:00.590 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node3] => (item={'src': 'UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd', 'path': 'none', 'fstype': 'swap', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'present', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "swap",
    "mount_info": {
        "dump": 0,
        "fstype": "swap",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "none",
        "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
        "state": "present"
    },
    "name": "none",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd"
}
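
For swap volumes the role writes an fstab entry whose mount point is 'none'. A standalone task producing the same entry with ansible.posix.mount would look roughly like this sketch, with src/fstype/opts taken from the loop item above; state 'present' edits /etc/fstab without mounting anything:

    # Sketch: add the swap line to /etc/fstab without activating it
    - name: Ensure the swap entry is present in /etc/fstab
      ansible.posix.mount:
        src: UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd
        path: none
        fstype: swap
        opts: defaults
        state: present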

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Friday 17 January 2025  10:00:11 -0500 (0:00:00.495)       0:02:01.085 ******** 
skipping: [managed-node3] => (item={'src': 'UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd', 'path': 'none', 'fstype': 'swap', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'present', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "swap",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "none",
        "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
        "state": "present"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Friday 17 January 2025  10:00:11 -0500 (0:00:00.075)       0:02:01.161 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Friday 17 January 2025  10:00:12 -0500 (0:00:00.898)       0:02:02.059 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737124906.382958,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1737124902.2659435,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1737124902.2670383,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "205140862",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Friday 17 January 2025  10:00:13 -0500 (0:00:00.426)       0:02:02.486 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Friday 17 January 2025  10:00:13 -0500 (0:00:00.022)       0:02:02.508 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:123
Friday 17 January 2025  10:00:14 -0500 (0:00:00.942)       0:02:03.451 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Friday 17 January 2025  10:00:14 -0500 (0:00:00.063)       0:02:03.514 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_pools_list | length > 0"
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Friday 17 January 2025  10:00:14 -0500 (0:00:00.044)       0:02:03.559 ******** 
ok: [managed-node3] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "swap",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Friday 17 January 2025  10:00:14 -0500 (0:00:00.054)       0:02:03.614 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "swap",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "12a09b6b-eab6-4e6d-877b-514d13a7ffdd"
        },
        "/dev/sdb": {
            "fstype": "ext3",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "dd89b050-dae2-4398-a070-585dc37b7eff"
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "3b8f3f3c-04e0-475c-a029-4f3419b194d4"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
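
The info mapping above carries the same fields lsblk reports (name, fstype, label, mountpoint, size, type, uuid). An illustrative way to collect comparable data by hand, not necessarily how the test's helper gathers it:

    # Illustrative sketch: gather per-device fields as KEY="value" pairs
    - name: Collect block device info
      ansible.builtin.command: lsblk -p -P -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: blk_info
      changed_when: false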

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Friday 17 January 2025  10:00:14 -0500 (0:00:00.438)       0:02:04.053 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003701",
    "end": "2025-01-17 10:00:15.230356",
    "rc": 0,
    "start": "2025-01-17 10:00:15.226655"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:07 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=3b8f3f3c-04e0-475c-a029-4f3419b194d4 /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd none swap defaults 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Friday 17 January 2025  10:00:15 -0500 (0:00:00.429)       0:02:04.482 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003261",
    "end": "2025-01-17 10:00:15.633866",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-17 10:00:15.630605"
}
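
The failed_when_result field in this output indicates a failed_when override, so a missing /etc/crypttab would not abort verification. Roughly, assuming the override is a plain 'false':

    # Sketch: read crypttab but never fail the play on a non-zero rc
    - name: Read the /etc/crypttab file
      ansible.builtin.command: cat /etc/crypttab
      register: storage_test_crypttab
      changed_when: false
      failed_when: false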

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Friday 17 January 2025  10:00:15 -0500 (0:00:00.413)       0:02:04.896 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Friday 17 January 2025  10:00:15 -0500 (0:00:00.030)       0:02:04.927 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'swap', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/sda', '_raw_device': '/dev/sda', '_mount_id': 'UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd', '_kernel_device': '/dev/sda', '_raw_kernel_device': '/dev/sda'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Friday 17 January 2025  10:00:15 -0500 (0:00:00.177)       0:02:05.104 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Friday 17 January 2025  10:00:15 -0500 (0:00:00.061)       0:02:05.166 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
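
Given the templated task name and the eight per-subset files included here, the dispatch presumably loops over _storage_volume_tests along these lines (a sketch, not the verified source of test-verify-volume.yml):

    # Sketch: include one verification file per test subset
    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset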

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Friday 17 January 2025  10:00:16 -0500 (0:00:00.152)       0:02:05.319 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sda"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Friday 17 January 2025  10:00:16 -0500 (0:00:00.043)       0:02:05.362 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "[SWAP]",
        "storage_test_swap_expected_matches": "1"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Friday 17 January 2025  10:00:16 -0500 (0:00:00.053)       0:02:05.415 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Friday 17 January 2025  10:00:16 -0500 (0:00:00.024)       0:02:05.440 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Friday 17 January 2025  10:00:16 -0500 (0:00:00.029)       0:02:05.469 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Friday 17 January 2025  10:00:16 -0500 (0:00:00.024)       0:02:05.493 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Friday 17 January 2025  10:00:16 -0500 (0:00:00.026)       0:02:05.520 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Friday 17 January 2025  10:00:16 -0500 (0:00:00.024)       0:02:05.545 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/sda"
    ],
    "delta": "0:00:00.003221",
    "end": "2025-01-17 10:00:16.713982",
    "rc": 0,
    "start": "2025-01-17 10:00:16.710761"
}

STDOUT:

/dev/sda

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Friday 17 January 2025  10:00:16 -0500 (0:00:00.420)       0:02:05.966 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/proc/swaps"
    ],
    "delta": "0:00:00.003312",
    "end": "2025-01-17 10:00:17.118434",
    "rc": 0,
    "start": "2025-01-17 10:00:17.115122"
}

STDOUT:

Filename				Type		Size		Used		Priority
/dev/zram0                              partition	3802108		8232		100
/dev/sda                                partition	10485756	0		-2
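
/dev/zram0 is Fedora's default compressed-RAM swap; the verification only cares that /dev/sda appears exactly once, matching the storage_test_swap_expected_matches value of "1" set earlier. A hedged sketch of such an assertion over the registered /proc/swaps output:

    # Sketch: exactly one /proc/swaps line should start with /dev/sda
    - name: Verify swap status
      ansible.builtin.assert:
        that:
          - "storage_test_swaps.stdout_lines | select('search', '^/dev/sda ') | list | length == 1"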

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Friday 17 January 2025  10:00:17 -0500 (0:00:00.413)       0:02:06.379 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Unset facts] *************************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Friday 17 January 2025  10:00:17 -0500 (0:00:00.055)       0:02:06.434 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Friday 17 January 2025  10:00:17 -0500 (0:00:00.025)       0:02:06.460 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [
            "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd "
        ],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}
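
The match lists above are plausibly built by running regex_findall over the fstab contents registered earlier as storage_test_fstab (the same variable cleaned up at the end of verification). A sketch of the identifier check that the next task asserts:

    # Sketch: the managed UUID should occur exactly once in /etc/fstab
    - name: Verify that the device identifier appears in /etc/fstab
      ansible.builtin.assert:
        that:
          - "storage_test_fstab.stdout | regex_findall('UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd ') | length == 1"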

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Friday 17 January 2025  10:00:17 -0500 (0:00:00.076)       0:02:06.536 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Friday 17 January 2025  10:00:17 -0500 (0:00:00.050)       0:02:06.587 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Friday 17 January 2025  10:00:17 -0500 (0:00:00.051)       0:02:06.639 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Friday 17 January 2025  10:00:17 -0500 (0:00:00.044)       0:02:06.683 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Friday 17 January 2025  10:00:17 -0500 (0:00:00.049)       0:02:06.733 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Friday 17 January 2025  10:00:17 -0500 (0:00:00.025)       0:02:06.759 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Friday 17 January 2025  10:00:17 -0500 (0:00:00.063)       0:02:06.822 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Friday 17 January 2025  10:00:17 -0500 (0:00:00.072)       0:02:06.895 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125988.7268443,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737125988.697844,
        "dev": 6,
        "device_type": 2048,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 559,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737125988.697844,
        "nlink": 1,
        "path": "/dev/sda",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
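
The stat result confirms a block device node: isblk is true and device_type 2048 encodes major 8, minor 0, the kernel dev_t for sda. A sketch of the presence check the next task performs, assuming the stat above was registered as storage_test_dev (a hypothetical name, not shown in the log):

    # Sketch: the device node must exist and be a block device
    - name: Verify the presence/absence of the device node
      ansible.builtin.assert:
        that:
          - storage_test_dev.stat.exists
          - storage_test_dev.stat.isblk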

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Friday 17 January 2025  10:00:18 -0500 (0:00:00.444)       0:02:07.339 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Friday 17 January 2025  10:00:18 -0500 (0:00:00.031)       0:02:07.371 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Friday 17 January 2025  10:00:18 -0500 (0:00:00.058)       0:02:07.429 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Friday 17 January 2025  10:00:18 -0500 (0:00:00.065)       0:02:07.494 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Friday 17 January 2025  10:00:18 -0500 (0:00:00.047)       0:02:07.541 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Friday 17 January 2025  10:00:18 -0500 (0:00:00.038)       0:02:07.580 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Friday 17 January 2025  10:00:18 -0500 (0:00:00.033)       0:02:07.613 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Friday 17 January 2025  10:00:18 -0500 (0:00:00.027)       0:02:07.641 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Friday 17 January 2025  10:00:20 -0500 (0:00:01.566)       0:02:09.208 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Friday 17 January 2025  10:00:20 -0500 (0:00:00.023)       0:02:09.231 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Friday 17 January 2025  10:00:20 -0500 (0:00:00.026)       0:02:09.258 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Friday 17 January 2025  10:00:20 -0500 (0:00:00.052)       0:02:09.310 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Friday 17 January 2025  10:00:20 -0500 (0:00:00.024)       0:02:09.334 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Friday 17 January 2025  10:00:20 -0500 (0:00:00.024)       0:02:09.359 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Friday 17 January 2025  10:00:20 -0500 (0:00:00.024)       0:02:09.383 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Friday 17 January 2025  10:00:20 -0500 (0:00:00.024)       0:02:09.408 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Friday 17 January 2025  10:00:20 -0500 (0:00:00.026)       0:02:09.434 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Friday 17 January 2025  10:00:20 -0500 (0:00:00.054)       0:02:09.489 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Friday 17 January 2025  10:00:20 -0500 (0:00:00.063)       0:02:09.552 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Friday 17 January 2025  10:00:20 -0500 (0:00:00.052)       0:02:09.605 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Friday 17 January 2025  10:00:20 -0500 (0:00:00.044)       0:02:09.650 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Friday 17 January 2025  10:00:20 -0500 (0:00:00.050)       0:02:09.700 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Friday 17 January 2025  10:00:20 -0500 (0:00:00.028)       0:02:09.729 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Friday 17 January 2025  10:00:20 -0500 (0:00:00.024)       0:02:09.753 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Friday 17 January 2025  10:00:20 -0500 (0:00:00.024)       0:02:09.777 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Friday 17 January 2025  10:00:20 -0500 (0:00:00.023)       0:02:09.801 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Friday 17 January 2025  10:00:20 -0500 (0:00:00.028)       0:02:09.830 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Friday 17 January 2025  10:00:20 -0500 (0:00:00.032)       0:02:09.862 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Friday 17 January 2025  10:00:20 -0500 (0:00:00.042)       0:02:09.905 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Friday 17 January 2025  10:00:20 -0500 (0:00:00.040)       0:02:09.945 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Friday 17 January 2025  10:00:20 -0500 (0:00:00.040)       0:02:09.985 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Friday 17 January 2025  10:00:20 -0500 (0:00:00.028)       0:02:10.014 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Friday 17 January 2025  10:00:20 -0500 (0:00:00.028)       0:02:10.042 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Friday 17 January 2025  10:00:20 -0500 (0:00:00.055)       0:02:10.098 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Friday 17 January 2025  10:00:20 -0500 (0:00:00.063)       0:02:10.161 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Friday 17 January 2025  10:00:21 -0500 (0:00:00.047)       0:02:10.209 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Friday 17 January 2025  10:00:21 -0500 (0:00:00.028)       0:02:10.238 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Friday 17 January 2025  10:00:21 -0500 (0:00:00.045)       0:02:10.283 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Friday 17 January 2025  10:00:21 -0500 (0:00:00.044)       0:02:10.328 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Friday 17 January 2025  10:00:21 -0500 (0:00:00.048)       0:02:10.377 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Friday 17 January 2025  10:00:21 -0500 (0:00:00.154)       0:02:10.531 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Friday 17 January 2025  10:00:21 -0500 (0:00:00.047)       0:02:10.578 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Friday 17 January 2025  10:00:21 -0500 (0:00:00.038)       0:02:10.616 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Friday 17 January 2025  10:00:21 -0500 (0:00:00.027)       0:02:10.644 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Friday 17 January 2025  10:00:21 -0500 (0:00:00.024)       0:02:10.668 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Friday 17 January 2025  10:00:21 -0500 (0:00:00.023)       0:02:10.691 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Friday 17 January 2025  10:00:21 -0500 (0:00:00.027)       0:02:10.718 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Friday 17 January 2025  10:00:21 -0500 (0:00:00.024)       0:02:10.742 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Friday 17 January 2025  10:00:21 -0500 (0:00:00.023)       0:02:10.766 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Friday 17 January 2025  10:00:21 -0500 (0:00:00.024)       0:02:10.790 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Friday 17 January 2025  10:00:21 -0500 (0:00:00.023)       0:02:10.814 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Friday 17 January 2025  10:00:21 -0500 (0:00:00.024)       0:02:10.838 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Friday 17 January 2025  10:00:21 -0500 (0:00:00.026)       0:02:10.865 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Friday 17 January 2025  10:00:21 -0500 (0:00:00.024)       0:02:10.890 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Friday 17 January 2025  10:00:21 -0500 (0:00:00.024)       0:02:10.914 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Friday 17 January 2025  10:00:21 -0500 (0:00:00.029)       0:02:10.944 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Friday 17 January 2025  10:00:21 -0500 (0:00:00.028)       0:02:10.972 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Friday 17 January 2025  10:00:21 -0500 (0:00:00.044)       0:02:11.017 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Friday 17 January 2025  10:00:21 -0500 (0:00:00.046)       0:02:11.063 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Friday 17 January 2025  10:00:21 -0500 (0:00:00.073)       0:02:11.137 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Friday 17 January 2025  10:00:21 -0500 (0:00:00.038)       0:02:11.176 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Friday 17 January 2025  10:00:22 -0500 (0:00:00.038)       0:02:11.214 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Friday 17 January 2025  10:00:22 -0500 (0:00:00.038)       0:02:11.253 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Friday 17 January 2025  10:00:22 -0500 (0:00:00.037)       0:02:11.290 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Friday 17 January 2025  10:00:22 -0500 (0:00:00.041)       0:02:11.332 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Friday 17 January 2025  10:00:22 -0500 (0:00:00.037)       0:02:11.369 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Friday 17 January 2025  10:00:22 -0500 (0:00:00.038)       0:02:11.408 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Friday 17 January 2025  10:00:22 -0500 (0:00:00.037)       0:02:11.445 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:126
Friday 17 January 2025  10:00:22 -0500 (0:00:00.039)       0:02:11.485 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Friday 17 January 2025  10:00:22 -0500 (0:00:00.202)       0:02:11.688 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Friday 17 January 2025  10:00:22 -0500 (0:00:00.066)       0:02:11.754 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Friday 17 January 2025  10:00:22 -0500 (0:00:00.078)       0:02:11.832 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_40.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_40.yml",
    "skip_reason": "Conditional result was False"
}
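
The skip/ok pattern above is the usual first-found vars dance: the role walks a list of increasingly specific candidate files and includes only the ones that exist. That is why RedHat.yml is skipped, Fedora.yml is loaded, and Fedora_40.yml is tried (and skipped) twice: once for the major version and once for the full version, both rendering to the same name on Fedora 40. A rough sketch of the pattern, with the candidate list assumed rather than copied from the role:

    # Sketch of the first-found vars pattern (candidate names are assumptions).
    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml
        - "{{ ansible_distribution }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_major_version }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_version }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file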

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Friday 17 January 2025  10:00:22 -0500 (0:00:00.087)       0:02:11.920 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Friday 17 January 2025  10:00:22 -0500 (0:00:00.043)       0:02:11.963 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Friday 17 January 2025  10:00:22 -0500 (0:00:00.047)       0:02:12.011 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Friday 17 January 2025  10:00:22 -0500 (0:00:00.042)       0:02:12.053 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Friday 17 January 2025  10:00:22 -0500 (0:00:00.047)       0:02:12.101 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Friday 17 January 2025  10:00:23 -0500 (0:00:00.094)       0:02:12.195 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Friday 17 January 2025  10:00:23 -0500 (0:00:00.046)       0:02:12.241 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Friday 17 January 2025  10:00:23 -0500 (0:00:00.042)       0:02:12.284 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "mount_point": "/opt/test",
            "name": "test1",
            "state": "absent",
            "type": "disk"
        },
        {
            "disks": [
                "sdb"
            ],
            "mount_point": "none",
            "name": "test2",
            "state": "absent",
            "type": "disk"
        }
    ]
}
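
The storage_volumes shown above are exactly what the test's "Clean up" play feeds to the role: both disk-backed volumes flip to state: absent so their formats get wiped. A minimal sketch of that invocation (the include_role form is an assumption; the values come from the output above):

    # Sketch: how the clean-up step hands these volumes to the storage role.
    - name: Clean up
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_volumes:
          - { name: test1, type: disk, disks: [sda], mount_point: /opt/test, state: absent }
          - { name: test2, type: disk, disks: [sdb], mount_point: "none", state: absent }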

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Friday 17 January 2025  10:00:23 -0500 (0:00:00.051)       0:02:12.336 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Friday 17 January 2025  10:00:23 -0500 (0:00:00.046)       0:02:12.382 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Friday 17 January 2025  10:00:23 -0500 (0:00:00.047)       0:02:12.430 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Friday 17 January 2025  10:00:23 -0500 (0:00:00.046)       0:02:12.476 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Friday 17 January 2025  10:00:23 -0500 (0:00:00.048)       0:02:12.525 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Friday 17 January 2025  10:00:23 -0500 (0:00:00.104)       0:02:12.629 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Friday 17 January 2025  10:00:23 -0500 (0:00:00.038)       0:02:12.668 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/sdb",
            "fs_type": "ext3"
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "swap"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "fstype": "swap",
            "path": "none",
            "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
            "state": "absent"
        }
    ],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sda",
            "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
            "_raw_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "swap",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "/opt/test",
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        },
        {
            "_device": "/dev/sdb",
            "_mount_id": "UUID=dd89b050-dae2-4398-a070-585dc37b7eff",
            "_raw_device": "/dev/sdb",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sdb"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext3",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "none",
            "mount_user": null,
            "name": "test2",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Friday 17 January 2025  10:00:25 -0500 (0:00:01.835)       0:02:14.504 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Friday 17 January 2025  10:00:25 -0500 (0:00:00.054)       0:02:14.559 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737125992.3738563,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "19198d5856f71bcb1cffb4990f211795b664c2d0",
        "ctime": 1737125992.3728564,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 281544,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737125992.3728564,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1431,
        "uid": 0,
        "version": "2570911275",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Friday 17 January 2025  10:00:25 -0500 (0:00:00.433)       0:02:14.992 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}
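
changed: false here means the fingerprint comment is already in place; it is the "# system_role:storage" line visible at the top of /etc/fstab further down in this log. A hedged sketch of the idea, not the role's literal task (the register name in the condition is hypothetical):

    # Sketch only: stamp a fingerprint comment at the top of /etc/fstab.
    - name: Add fingerprint to /etc/fstab if present
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"
        insertbefore: BOF
        backup: true
      when: __storage_fstab_stat.stat.exists  # hypothetical register from the stat task above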

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Friday 17 January 2025  10:00:26 -0500 (0:00:00.448)       0:02:15.441 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Friday 17 January 2025  10:00:26 -0500 (0:00:00.028)       0:02:15.470 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/sdb",
                "fs_type": "ext3"
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "swap"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "fstype": "swap",
                "path": "none",
                "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
                "state": "absent"
            }
        ],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sda",
                "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "swap",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "/opt/test",
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            },
            {
                "_device": "/dev/sdb",
                "_mount_id": "UUID=dd89b050-dae2-4398-a070-585dc37b7eff",
                "_raw_device": "/dev/sdb",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sdb"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext3",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "none",
                "mount_user": null,
                "name": "test2",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Friday 17 January 2025  10:00:26 -0500 (0:00:00.043)       0:02:15.514 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Friday 17 January 2025  10:00:26 -0500 (0:00:00.028)       0:02:15.542 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sda",
                "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "swap",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "/opt/test",
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            },
            {
                "_device": "/dev/sdb",
                "_mount_id": "UUID=dd89b050-dae2-4398-a070-585dc37b7eff",
                "_raw_device": "/dev/sdb",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sdb"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext3",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": "none",
                "mount_user": null,
                "name": "test2",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Friday 17 January 2025  10:00:26 -0500 (0:00:00.031)       0:02:15.574 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd', 'path': 'none', 'state': 'absent', 'fstype': 'swap'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "swap",
    "mount_info": {
        "fstype": "swap",
        "path": "none",
        "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
        "state": "absent"
    },
    "name": "none",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd"
}
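
The loop item above shows the shape of the entries in blivet_output.mounts; removing the stale swap line is one ansible.posix.mount call per absent entry. A minimal sketch, assuming the role loops straight over the module result (the loop_var name mount_info is taken from the output):

    # Sketch: drop fstab entries that the blivet step marked absent.
    - name: Remove obsolete mounts
      ansible.posix.mount:
        src: "{{ mount_info.src }}"
        path: "{{ mount_info.path }}"
        fstype: "{{ mount_info.fstype }}"
        state: absent
      loop: "{{ blivet_output.mounts | selectattr('state', 'equalto', 'absent') | list }}"
      loop_control:
        loop_var: mount_info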

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Friday 17 January 2025  10:00:26 -0500 (0:00:00.444)       0:02:16.018 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Friday 17 January 2025  10:00:27 -0500 (0:00:00.871)       0:02:16.889 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Friday 17 January 2025  10:00:27 -0500 (0:00:00.070)       0:02:16.960 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Friday 17 January 2025  10:00:27 -0500 (0:00:00.178)       0:02:17.139 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
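
Both "Tell systemd to refresh its view of /etc/fstab" tasks are plain daemon-reloads, so systemd regenerates the .mount and .swap units it derives from the file just edited; "name": null in the result is consistent with calling the systemd module with no unit name. A minimal sketch:

    # Sketch: refresh systemd's generated units after editing /etc/fstab.
    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd_service:
        daemon_reload: true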

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Friday 17 January 2025  10:00:28 -0500 (0:00:00.898)       0:02:18.037 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737124906.382958,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1737124902.2659435,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1737124902.2670383,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "205140862",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Friday 17 January 2025  10:00:29 -0500 (0:00:00.412)       0:02:18.449 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Friday 17 January 2025  10:00:29 -0500 (0:00:00.024)       0:02:18.473 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/tests_swap.yml:142
Friday 17 January 2025  10:00:30 -0500 (0:00:00.947)       0:02:19.420 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Friday 17 January 2025  10:00:30 -0500 (0:00:00.067)       0:02:19.488 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_pools_list | length > 0"
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Friday 17 January 2025  10:00:30 -0500 (0:00:00.048)       0:02:19.537 ******** 
ok: [managed-node3] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/sda",
            "_mount_id": "UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd",
            "_raw_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "swap",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "/opt/test",
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        },
        {
            "_device": "/dev/sdb",
            "_mount_id": "UUID=dd89b050-dae2-4398-a070-585dc37b7eff",
            "_raw_device": "/dev/sdb",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sdb"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext3",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": "none",
            "mount_user": null,
            "name": "test2",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Friday 17 January 2025  10:00:30 -0500 (0:00:00.056)       0:02:19.593 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "3b8f3f3c-04e0-475c-a029-4f3419b194d4"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
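
The info mapping above records name/fstype/label/mountpoint/size/type/uuid per device; note that /dev/sda and /dev/sdb now report empty fstype and uuid, confirming the swap and ext3 signatures were destroyed. If the test's own helper module were unavailable, roughly the same data could be pulled with lsblk (a stand-in, not the test's actual mechanism):

    # Sketch: an lsblk stand-in for the test's block-device info helper.
    - name: Collect info about the volumes
      ansible.builtin.command:
        cmd: lsblk -p -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID --pairs
      register: storage_test_lsblk  # hypothetical register name
      changed_when: false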

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Friday 17 January 2025  10:00:30 -0500 (0:00:00.437)       0:02:20.030 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003292",
    "end": "2025-01-17 10:00:31.218076",
    "rc": 0,
    "start": "2025-01-17 10:00:31.214784"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:07 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=3b8f3f3c-04e0-475c-a029-4f3419b194d4 /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Friday 17 January 2025  10:00:31 -0500 (0:00:00.440)       0:02:20.471 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:01.004565",
    "end": "2025-01-17 10:00:32.642051",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-17 10:00:31.637486"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Friday 17 January 2025  10:00:32 -0500 (0:00:01.424)       0:02:21.896 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Friday 17 January 2025  10:00:32 -0500 (0:00:00.021)       0:02:21.917 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'swap', 'mount_options': 'defaults', 'mount_point': '/opt/test', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 10737418240, 'state': 'absent', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/sda', '_raw_device': '/dev/sda', '_mount_id': 'UUID=12a09b6b-eab6-4e6d-877b-514d13a7ffdd'})
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext3', 'mount_options': 'defaults', 'mount_point': 'none', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': 10737418240, 'state': 'absent', 'type': 'disk', 'disks': ['sdb'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/sdb', '_raw_device': '/dev/sdb', '_mount_id': 'UUID=dd89b050-dae2-4398-a070-585dc37b7eff'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Friday 17 January 2025  10:00:32 -0500 (0:00:00.108)       0:02:22.025 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Friday 17 January 2025  10:00:32 -0500 (0:00:00.066)       0:02:22.092 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
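
The eight includes above come from looping the _storage_volume_tests list, set two tasks earlier, into per-subset task files; the loop variable name is visible in the task title's {{ storage_test_volume_subset }}. A sketch of that dispatch:

    # Sketch: fan out one include per verification subset.
    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset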

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Friday 17 January 2025  10:00:33 -0500 (0:00:00.159)       0:02:22.251 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sda"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Friday 17 January 2025  10:00:33 -0500 (0:00:00.034)       0:02:22.286 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "[SWAP]",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Friday 17 January 2025  10:00:33 -0500 (0:00:00.052)       0:02:22.338 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Friday 17 January 2025  10:00:33 -0500 (0:00:00.024)       0:02:22.363 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Friday 17 January 2025  10:00:33 -0500 (0:00:00.022)       0:02:22.385 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Friday 17 January 2025  10:00:33 -0500 (0:00:00.025)       0:02:22.410 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Friday 17 January 2025  10:00:33 -0500 (0:00:00.024)       0:02:22.434 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Friday 17 January 2025  10:00:33 -0500 (0:00:00.025)       0:02:22.460 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/sda"
    ],
    "delta": "0:00:00.003288",
    "end": "2025-01-17 10:00:33.663411",
    "rc": 0,
    "start": "2025-01-17 10:00:33.660123"
}

STDOUT:

/dev/sda

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Friday 17 January 2025  10:00:33 -0500 (0:00:00.471)       0:02:22.932 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/proc/swaps"
    ],
    "delta": "0:00:00.003332",
    "end": "2025-01-17 10:00:34.113444",
    "rc": 0,
    "start": "2025-01-17 10:00:34.110112"
}

STDOUT:

Filename				Type		Size		Used		Priority
/dev/zram0                              partition	3802108		8232		100
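
storage_test_swap_expected_matches was set to "0" earlier because the volume is absent, and /proc/swaps indeed lists only /dev/zram0, so the assertion that follows passes. A hedged sketch of that check, reusing the fact names nulled later in "Unset facts" (the match expression itself is an assumption):

    # Sketch: the device should appear in /proc/swaps exactly the expected
    # number of times (zero here, since state is absent).
    - name: Verify swap status
      ansible.builtin.assert:
        that:
          - >-
            storage_test_swaps.stdout_lines
            | select('search', storage_test_device_path)
            | list | length == storage_test_swap_expected_matches | int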

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Friday 17 January 2025  10:00:34 -0500 (0:00:00.448)       0:02:23.380 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Unset facts] *************************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Friday 17 January 2025  10:00:34 -0500 (0:00:00.088)       0:02:23.468 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Friday 17 January 2025  10:00:34 -0500 (0:00:00.045)       0:02:23.514 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Friday 17 January 2025  10:00:34 -0500 (0:00:00.095)       0:02:23.609 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Friday 17 January 2025  10:00:34 -0500 (0:00:00.049)       0:02:23.659 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Friday 17 January 2025  10:00:34 -0500 (0:00:00.058)       0:02:23.718 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Friday 17 January 2025  10:00:34 -0500 (0:00:00.046)       0:02:23.765 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Friday 17 January 2025  10:00:34 -0500 (0:00:00.054)       0:02:23.819 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Friday 17 January 2025  10:00:34 -0500 (0:00:00.027)       0:02:23.847 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Friday 17 January 2025  10:00:34 -0500 (0:00:00.025)       0:02:23.873 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Friday 17 January 2025  10:00:34 -0500 (0:00:00.045)       0:02:23.918 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737126025.1759663,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737126025.1759663,
        "dev": 6,
        "device_type": 2048,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 559,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737126025.1759663,
        "nlink": 1,
        "path": "/dev/sda",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Friday 17 January 2025  10:00:35 -0500 (0:00:00.445)       0:02:24.364 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
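
The stat result above (exists: true, isblk: true, mode 0660, group disk) is what the following assert inspects: even though the test volume itself is absent, a type 'disk' volume's node must still exist. A minimal sketch of the stat/assert pair, with the register name chosen here for illustration and the when condition taken from the false_condition of the skipped companion task:

- name: See whether the device node is present
  ansible.builtin.stat:
    path: /dev/sda   # the device under test in this run
  register: storage_test_dev   # hypothetical register name

- name: Verify the presence/absence of the device node
  ansible.builtin.assert:
    that:
      - storage_test_dev.stat.exists and storage_test_dev.stat.isblk
  when: _storage_test_volume_present or storage_test_volume.type == 'disk'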

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Friday 17 January 2025  10:00:35 -0500 (0:00:00.045)       0:02:24.409 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Friday 17 January 2025  10:00:35 -0500 (0:00:00.041)       0:02:24.450 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Friday 17 January 2025  10:00:35 -0500 (0:00:00.036)       0:02:24.487 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Friday 17 January 2025  10:00:35 -0500 (0:00:00.043)       0:02:24.530 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Friday 17 January 2025  10:00:35 -0500 (0:00:00.036)       0:02:24.566 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Friday 17 January 2025  10:00:35 -0500 (0:00:00.037)       0:02:24.604 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Friday 17 January 2025  10:00:35 -0500 (0:00:00.033)       0:02:24.637 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Friday 17 January 2025  10:00:37 -0500 (0:00:01.627)       0:02:26.265 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Friday 17 January 2025  10:00:37 -0500 (0:00:00.026)       0:02:26.291 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Friday 17 January 2025  10:00:37 -0500 (0:00:00.024)       0:02:26.315 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Friday 17 January 2025  10:00:37 -0500 (0:00:00.028)       0:02:26.344 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Friday 17 January 2025  10:00:37 -0500 (0:00:00.038)       0:02:26.383 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Friday 17 January 2025  10:00:37 -0500 (0:00:00.025)       0:02:26.408 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Friday 17 January 2025  10:00:37 -0500 (0:00:00.022)       0:02:26.430 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Friday 17 January 2025  10:00:37 -0500 (0:00:00.025)       0:02:26.456 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Friday 17 January 2025  10:00:37 -0500 (0:00:00.022)       0:02:26.479 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Friday 17 January 2025  10:00:37 -0500 (0:00:00.053)       0:02:26.532 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
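
Here _storage_test_crypttab_entries is empty and _storage_test_expected_crypttab_entries is "0", so the assert passes, and the three format/backing-device/key-file checks that follow are skipped because they only run when exactly one entry is expected. A minimal sketch of the check, using the fact names visible in this log:

- name: Check for /etc/crypttab entry
  ansible.builtin.assert:
    that:
      # zero entries found, zero expected, so this passes
      - _storage_test_crypttab_entries | length | string == _storage_test_expected_crypttab_entries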

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Friday 17 January 2025  10:00:37 -0500 (0:00:00.050)       0:02:26.583 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Friday 17 January 2025  10:00:37 -0500 (0:00:00.045)       0:02:26.629 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Friday 17 January 2025  10:00:37 -0500 (0:00:00.053)       0:02:26.682 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Friday 17 January 2025  10:00:37 -0500 (0:00:00.215)       0:02:26.898 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Friday 17 January 2025  10:00:37 -0500 (0:00:00.039)       0:02:26.938 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Friday 17 January 2025  10:00:37 -0500 (0:00:00.037)       0:02:26.976 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Friday 17 January 2025  10:00:37 -0500 (0:00:00.037)       0:02:27.013 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Friday 17 January 2025  10:00:37 -0500 (0:00:00.038)       0:02:27.051 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Friday 17 January 2025  10:00:37 -0500 (0:00:00.037)       0:02:27.089 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Friday 17 January 2025  10:00:37 -0500 (0:00:00.040)       0:02:27.130 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Friday 17 January 2025  10:00:37 -0500 (0:00:00.037)       0:02:27.167 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Friday 17 January 2025  10:00:38 -0500 (0:00:00.039)       0:02:27.207 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Friday 17 January 2025  10:00:38 -0500 (0:00:00.038)       0:02:27.245 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Friday 17 January 2025  10:00:38 -0500 (0:00:00.039)       0:02:27.284 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Friday 17 January 2025  10:00:38 -0500 (0:00:00.038)       0:02:27.322 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Friday 17 January 2025  10:00:38 -0500 (0:00:00.095)       0:02:27.417 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Friday 17 January 2025  10:00:38 -0500 (0:00:00.081)       0:02:27.499 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Friday 17 January 2025  10:00:38 -0500 (0:00:00.067)       0:02:27.567 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
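
"VARIABLE IS NOT DEFINED!" is not an error here: ansible.builtin.debug prints that marker instead of failing when asked to show an undefined variable, and storage_test_expected_size is only set on the size-calculation paths for present volumes, which were all skipped above. The task is presumably just:

- name: Show expected size
  ansible.builtin.debug:
    var: storage_test_expected_size   # undefined on this path; debug prints the marker rather than failing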

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Friday 17 January 2025  10:00:38 -0500 (0:00:00.043)       0:02:27.611 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Friday 17 January 2025  10:00:38 -0500 (0:00:00.067)       0:02:27.678 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Friday 17 January 2025  10:00:38 -0500 (0:00:00.073)       0:02:27.752 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Friday 17 January 2025  10:00:38 -0500 (0:00:00.078)       0:02:27.830 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Friday 17 January 2025  10:00:38 -0500 (0:00:00.068)       0:02:27.899 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Friday 17 January 2025  10:00:38 -0500 (0:00:00.069)       0:02:27.968 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Friday 17 January 2025  10:00:38 -0500 (0:00:00.036)       0:02:28.005 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Friday 17 January 2025  10:00:38 -0500 (0:00:00.037)       0:02:28.042 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Friday 17 January 2025  10:00:38 -0500 (0:00:00.035)       0:02:28.078 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Friday 17 January 2025  10:00:38 -0500 (0:00:00.043)       0:02:28.121 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Friday 17 January 2025  10:00:38 -0500 (0:00:00.040)       0:02:28.161 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Friday 17 January 2025  10:00:39 -0500 (0:00:00.035)       0:02:28.196 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Friday 17 January 2025  10:00:39 -0500 (0:00:00.036)       0:02:28.233 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Friday 17 January 2025  10:00:39 -0500 (0:00:00.035)       0:02:28.268 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Friday 17 January 2025  10:00:39 -0500 (0:00:00.035)       0:02:28.304 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Friday 17 January 2025  10:00:39 -0500 (0:00:00.041)       0:02:28.346 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Friday 17 January 2025  10:00:39 -0500 (0:00:00.038)       0:02:28.385 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Friday 17 January 2025  10:00:39 -0500 (0:00:00.037)       0:02:28.422 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Friday 17 January 2025  10:00:39 -0500 (0:00:00.038)       0:02:28.460 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Friday 17 January 2025  10:00:39 -0500 (0:00:00.051)       0:02:28.512 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Friday 17 January 2025  10:00:39 -0500 (0:00:00.058)       0:02:28.570 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}
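
Even a skipped task populates its register: the dict shown above (skipped: true, false_condition, skip_reason) is what the skipped "Parse the actual size of the volume" task left behind, and storage_test_actual_size presumably carries that registered result. The debug task would be simply:

- name: Show actual size
  ansible.builtin.debug:
    var: storage_test_actual_size   # holds the skipped task's registered result shown above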

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Friday 17 January 2025  10:00:39 -0500 (0:00:00.043)       0:02:28.614 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Friday 17 January 2025  10:00:39 -0500 (0:00:00.046)       0:02:28.661 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Friday 17 January 2025  10:00:39 -0500 (0:00:00.066)       0:02:28.728 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Friday 17 January 2025  10:00:39 -0500 (0:00:00.039)       0:02:28.767 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Friday 17 January 2025  10:00:39 -0500 (0:00:00.035)       0:02:28.803 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Friday 17 January 2025  10:00:39 -0500 (0:00:00.039)       0:02:28.842 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Friday 17 January 2025  10:00:39 -0500 (0:00:00.043)       0:02:28.886 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Friday 17 January 2025  10:00:39 -0500 (0:00:00.039)       0:02:28.925 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Friday 17 January 2025  10:00:39 -0500 (0:00:00.038)       0:02:28.963 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Friday 17 January 2025  10:00:39 -0500 (0:00:00.039)       0:02:29.003 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Friday 17 January 2025  10:00:39 -0500 (0:00:00.040)       0:02:29.044 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Friday 17 January 2025  10:00:39 -0500 (0:00:00.084)       0:02:29.128 ******** 
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
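
The eight includes above come from a single loop: test-verify-volume.yml iterates over the _storage_volume_tests list set just before it, including test-verify-volume-<subset>.yml once per item. That also explains the unexpanded "{{ storage_test_volume_subset }}" in the task title, since a templated name is not rendered per item in the include header. A sketch consistent with the task title and the per-item includes seen here:

- name: Run test verify for {{ storage_test_volume_subset }}
  ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset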

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Friday 17 January 2025  10:00:40 -0500 (0:00:00.188)       0:02:29.317 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sdb"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Friday 17 January 2025  10:00:40 -0500 (0:00:00.054)       0:02:29.372 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Friday 17 January 2025  10:00:40 -0500 (0:00:00.083)       0:02:29.456 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Friday 17 January 2025  10:00:40 -0500 (0:00:00.040)       0:02:29.496 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Friday 17 January 2025  10:00:40 -0500 (0:00:00.034)       0:02:29.531 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Friday 17 January 2025  10:00:40 -0500 (0:00:00.049)       0:02:29.580 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Friday 17 January 2025  10:00:40 -0500 (0:00:00.057)       0:02:29.638 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Friday 17 January 2025  10:00:40 -0500 (0:00:00.037)       0:02:29.676 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Friday 17 January 2025  10:00:40 -0500 (0:00:00.040)       0:02:29.716 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Friday 17 January 2025  10:00:40 -0500 (0:00:00.039)       0:02:29.756 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Friday 17 January 2025  10:00:40 -0500 (0:00:00.039)       0:02:29.795 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Friday 17 January 2025  10:00:40 -0500 (0:00:00.032)       0:02:29.827 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Friday 17 January 2025  10:00:40 -0500 (0:00:00.079)       0:02:29.907 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Friday 17 January 2025  10:00:40 -0500 (0:00:00.049)       0:02:29.957 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Friday 17 January 2025  10:00:40 -0500 (0:00:00.120)       0:02:30.077 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Friday 17 January 2025  10:00:41 -0500 (0:00:00.164)       0:02:30.242 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Friday 17 January 2025  10:00:41 -0500 (0:00:00.080)       0:02:30.322 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Friday 17 January 2025  10:00:41 -0500 (0:00:00.040)       0:02:30.363 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Friday 17 January 2025  10:00:41 -0500 (0:00:00.042)       0:02:30.405 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Friday 17 January 2025  10:00:41 -0500 (0:00:00.069)       0:02:30.474 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737126025.120966,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737126025.120966,
        "dev": 6,
        "device_type": 2064,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 572,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737126025.120966,
        "nlink": 1,
        "path": "/dev/sdb",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Friday 17 January 2025  10:00:41 -0500 (0:00:00.511)       0:02:30.985 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Friday 17 January 2025  10:00:41 -0500 (0:00:00.046)       0:02:31.032 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Friday 17 January 2025  10:00:41 -0500 (0:00:00.037)       0:02:31.069 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Friday 17 January 2025  10:00:41 -0500 (0:00:00.025)       0:02:31.095 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Friday 17 January 2025  10:00:41 -0500 (0:00:00.033)       0:02:31.128 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Friday 17 January 2025  10:00:41 -0500 (0:00:00.029)       0:02:31.158 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Friday 17 January 2025  10:00:41 -0500 (0:00:00.023)       0:02:31.182 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Friday 17 January 2025  10:00:42 -0500 (0:00:00.026)       0:02:31.208 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Friday 17 January 2025  10:00:43 -0500 (0:00:01.637)       0:02:32.846 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Friday 17 January 2025  10:00:43 -0500 (0:00:00.034)       0:02:32.880 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Friday 17 January 2025  10:00:43 -0500 (0:00:00.038)       0:02:32.919 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Friday 17 January 2025  10:00:43 -0500 (0:00:00.037)       0:02:32.957 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Friday 17 January 2025  10:00:43 -0500 (0:00:00.039)       0:02:32.996 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Friday 17 January 2025  10:00:43 -0500 (0:00:00.042)       0:02:33.038 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Friday 17 January 2025  10:00:43 -0500 (0:00:00.042)       0:02:33.081 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Friday 17 January 2025  10:00:43 -0500 (0:00:00.055)       0:02:33.136 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Friday 17 January 2025  10:00:44 -0500 (0:00:00.067)       0:02:33.204 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Friday 17 January 2025  10:00:44 -0500 (0:00:00.168)       0:02:33.372 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Friday 17 January 2025  10:00:44 -0500 (0:00:00.123)       0:02:33.495 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Friday 17 January 2025  10:00:44 -0500 (0:00:00.089)       0:02:33.585 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Friday 17 January 2025  10:00:44 -0500 (0:00:00.090)       0:02:33.676 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Friday 17 January 2025  10:00:44 -0500 (0:00:00.071)       0:02:33.747 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Friday 17 January 2025  10:00:44 -0500 (0:00:00.042)       0:02:33.790 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Friday 17 January 2025  10:00:44 -0500 (0:00:00.035)       0:02:33.826 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Friday 17 January 2025  10:00:44 -0500 (0:00:00.045)       0:02:33.871 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Friday 17 January 2025  10:00:44 -0500 (0:00:00.060)       0:02:33.931 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Friday 17 January 2025  10:00:44 -0500 (0:00:00.040)       0:02:33.972 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Friday 17 January 2025  10:00:44 -0500 (0:00:00.040)       0:02:34.013 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Friday 17 January 2025  10:00:44 -0500 (0:00:00.039)       0:02:34.052 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Friday 17 January 2025  10:00:44 -0500 (0:00:00.040)       0:02:34.093 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Friday 17 January 2025  10:00:44 -0500 (0:00:00.040)       0:02:34.134 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Friday 17 January 2025  10:00:44 -0500 (0:00:00.042)       0:02:34.177 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}
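
Every RAID task above reports the same false_condition, so each task in test-verify-volume-md.yml is evidently gated on the volume type; a when: guard like the one below produces exactly the skipping: output seen here. The task body is illustrative (the _device attribute is an assumption); only the conditional is taken from the log:

    - name: Get information about RAID
      ansible.builtin.command: mdadm --detail {{ storage_test_volume._device }}
      register: storage_test_mdadm
      changed_when: false
      when: storage_test_volume.type == 'raid'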

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Friday 17 January 2025  10:00:45 -0500 (0:00:00.041)       0:02:34.218 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Friday 17 January 2025  10:00:45 -0500 (0:00:00.070)       0:02:34.289 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Friday 17 January 2025  10:00:45 -0500 (0:00:00.072)       0:02:34.361 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Friday 17 January 2025  10:00:45 -0500 (0:00:00.078)       0:02:34.440 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
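
"VARIABLE IS NOT DEFINED!" is not a failure here: ansible.builtin.debug prints that marker when asked to display a variable that has no value, and the play continues. A minimal standalone reproduction:

    - name: Show expected size
      ansible.builtin.debug:
        var: storage_test_expected_size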

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Friday 17 January 2025  10:00:45 -0500 (0:00:00.044)       0:02:34.484 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Friday 17 January 2025  10:00:45 -0500 (0:00:00.147)       0:02:34.632 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Friday 17 January 2025  10:00:45 -0500 (0:00:00.127)       0:02:34.760 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Friday 17 January 2025  10:00:45 -0500 (0:00:00.100)       0:02:34.861 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Friday 17 January 2025  10:00:45 -0500 (0:00:00.135)       0:02:34.997 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Friday 17 January 2025  10:00:45 -0500 (0:00:00.097)       0:02:35.094 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Friday 17 January 2025  10:00:45 -0500 (0:00:00.067)       0:02:35.162 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Friday 17 January 2025  10:00:46 -0500 (0:00:00.078)       0:02:35.240 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Friday 17 January 2025  10:00:46 -0500 (0:00:00.070)       0:02:35.311 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Friday 17 January 2025  10:00:46 -0500 (0:00:00.074)       0:02:35.385 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Friday 17 January 2025  10:00:46 -0500 (0:00:00.070)       0:02:35.456 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Friday 17 January 2025  10:00:46 -0500 (0:00:00.042)       0:02:35.499 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Friday 17 January 2025  10:00:46 -0500 (0:00:00.039)       0:02:35.538 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Friday 17 January 2025  10:00:46 -0500 (0:00:00.043)       0:02:35.581 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Friday 17 January 2025  10:00:46 -0500 (0:00:00.057)       0:02:35.639 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Friday 17 January 2025  10:00:46 -0500 (0:00:00.065)       0:02:35.705 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Friday 17 January 2025  10:00:46 -0500 (0:00:00.069)       0:02:35.775 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Friday 17 January 2025  10:00:46 -0500 (0:00:00.039)       0:02:35.814 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Friday 17 January 2025  10:00:46 -0500 (0:00:00.047)       0:02:35.862 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Friday 17 January 2025  10:00:46 -0500 (0:00:00.043)       0:02:35.905 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}
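
Taken together, the skipped thin-pool tasks above compute usable space as pool size minus a reserved amount, then clamp that value between a lower and an upper bound before deriving the expected volume size. A sketch of one clamping step (the fact names and use of the min filter are assumptions based on the task titles, not the verbatim source):

    - name: Apply upper size limit to max usable thin pool space
      ansible.builtin.set_fact:
        _storage_test_max_thin_pool_size: "{{ [_storage_test_max_thin_pool_size | int, _storage_test_max_reserve_size | int] | min }}"
      when: storage_test_volume.thin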

TASK [Show actual size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Friday 17 January 2025  10:00:46 -0500 (0:00:00.042)       0:02:35.947 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}
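
Note that storage_test_actual_size renders as the skip dictionary itself: a variable registered from a skipped task is still defined and holds {"skipped": true, ...}. A sketch of the mechanics (the command and device are hypothetical; the register/when combination is the point):

    - name: Parse the actual size of the volume
      ansible.builtin.command:
        cmd: lsblk --noheadings --output SIZE /dev/sda1
      register: storage_test_actual_size
      changed_when: false
      when: _storage_test_volume_present | bool

    - name: Show actual size
      ansible.builtin.debug:
        var: storage_test_actual_size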

TASK [Show expected size] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Friday 17 January 2025  10:00:46 -0500 (0:00:00.050)       0:02:35.998 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Friday 17 January 2025  10:00:46 -0500 (0:00:00.056)       0:02:36.055 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Friday 17 January 2025  10:00:46 -0500 (0:00:00.093)       0:02:36.148 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Friday 17 January 2025  10:00:46 -0500 (0:00:00.038)       0:02:36.187 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Friday 17 January 2025  10:00:47 -0500 (0:00:00.042)       0:02:36.230 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Friday 17 January 2025  10:00:47 -0500 (0:00:00.047)       0:02:36.277 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Friday 17 January 2025  10:00:47 -0500 (0:00:00.066)       0:02:36.344 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Friday 17 January 2025  10:00:47 -0500 (0:00:00.065)       0:02:36.410 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Friday 17 January 2025  10:00:47 -0500 (0:00:00.039)       0:02:36.450 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Friday 17 January 2025  10:00:47 -0500 (0:00:00.049)       0:02:36.499 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Friday 17 January 2025  10:00:47 -0500 (0:00:00.140)       0:02:36.639 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

PLAY RECAP *********************************************************************
managed-node3              : ok=494  changed=11   unreachable=0    failed=0    skipped=631  rescued=0    ignored=0   


TASKS RECAP ********************************************************************
Friday 17 January 2025  10:00:47 -0500 (0:00:00.027)       0:02:36.666 ******** 
=============================================================================== 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.84s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.72s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Get service facts ------------------- 2.95s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.95s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.84s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.77s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Make sure blivet is available ------- 1.70s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.69s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
Ensure test packages ---------------------------------------------------- 1.69s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.64s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
Ensure cryptsetup is present -------------------------------------------- 1.64s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Ensure test packages ---------------------------------------------------- 1.63s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2 
Ensure cryptsetup is present -------------------------------------------- 1.63s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Ensure cryptsetup is present -------------------------------------------- 1.63s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Ensure cryptsetup is present -------------------------------------------- 1.62s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Ensure cryptsetup is present -------------------------------------------- 1.60s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Ensure cryptsetup is present -------------------------------------------- 1.60s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Find unused disks in the system ----------------------------------------- 1.59s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11 
fedora.linux_system_roles.storage : Make sure required packages are installed --- 1.57s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 
Ensure cryptsetup is present -------------------------------------------- 1.57s
/tmp/collections-0t1/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10