[WARNING]: running playbook inside collection redhat.rhel_system_roles
ansible-playbook [core 2.14.17]
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python3.9/site-packages/ansible
  ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
  executable location = /usr/bin/ansible-playbook
  python version = 3.9.19 (main, Aug 23 2024, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-2)] (/usr/bin/python3)
  jinja version = 3.1.2
  libyaml = True
Using /etc/ansible/ansible.cfg as config file
statically imported: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/create-test-file.yml
statically imported: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-data-preservation.yml
statically imported: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/create-test-file.yml
statically imported: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-data-preservation.yml
statically imported: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/create-test-file.yml
statically imported: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-data-preservation.yml
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_luks_pool.yml **************************************************
1 plays in /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml

PLAY [Test LUKS pool] **********************************************************

TASK [Gathering Facts] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:2
Monday 21 October 2024  18:21:05 -0400 (0:00:00.018)       0:00:00.018 ******** 
ok: [managed-node2]

TASK [Enable FIPS mode] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:23
Monday 21 October 2024  18:21:07 -0400 (0:00:01.364)       0:00:01.383 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reboot] ******************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:27
Monday 21 October 2024  18:21:07 -0400 (0:00:00.026)       0:00:01.409 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure dracut-fips] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:37
Monday 21 October 2024  18:21:07 -0400 (0:00:00.035)       0:00:01.445 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Configure boot for FIPS] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:43
Monday 21 October 2024  18:21:07 -0400 (0:00:00.045)       0:00:01.491 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reboot] ******************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:52
Monday 21 October 2024  18:21:07 -0400 (0:00:00.033)       0:00:01.524 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Run the role] ************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:56
Monday 21 October 2024  18:21:07 -0400 (0:00:00.042)       0:00:01.567 ******** 

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:2
Monday 21 October 2024  18:21:07 -0400 (0:00:00.098)       0:00:01.665 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Ensure ansible_facts used by role] ****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 21 October 2024  18:21:07 -0400 (0:00:00.079)       0:00:01.745 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 21 October 2024  18:21:07 -0400 (0:00:00.074)       0:00:01.819 ******** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=RedHat_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/vars/RedHat_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.yml"
}
skipping: [managed-node2] => (item=RedHat_9.5.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.5.yml",
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if system is ostree] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 21 October 2024  18:21:07 -0400 (0:00:00.095)       0:00:01.915 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [redhat.rhel_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 21 October 2024  18:21:08 -0400 (0:00:00.637)       0:00:02.553 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__storage_is_ostree": false
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:5
Monday 21 October 2024  18:21:08 -0400 (0:00:00.045)       0:00:02.598 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:9
Monday 21 October 2024  18:21:08 -0400 (0:00:00.020)       0:00:02.618 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Include the appropriate provider tasks] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:13
Monday 21 October 2024  18:21:08 -0400 (0:00:00.020)       0:00:02.639 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Make sure blivet is available] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 21 October 2024  18:21:08 -0400 (0:00:00.049)       0:00:02.689 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [redhat.rhel_system_roles.storage : Show storage_pools] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 21 October 2024  18:21:09 -0400 (0:00:01.062)       0:00:03.751 ******** 
ok: [managed-node2] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined. 'storage_pools' is undefined"
}

TASK [redhat.rhel_system_roles.storage : Show storage_volumes] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 21 October 2024  18:21:09 -0400 (0:00:00.072)       0:00:03.824 ******** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined"
}

TASK [redhat.rhel_system_roles.storage : Get required packages] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 21 October 2024  18:21:09 -0400 (0:00:00.068)       0:00:03.892 ******** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [redhat.rhel_system_roles.storage : Enable copr repositories if needed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 21 October 2024  18:21:10 -0400 (0:00:01.043)       0:00:04.936 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/enable_coprs.yml:2
Monday 21 October 2024  18:21:10 -0400 (0:00:00.065)       0:00:05.001 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Make sure COPR support packages are present] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/enable_coprs.yml:13
Monday 21 October 2024  18:21:10 -0400 (0:00:00.067)       0:00:05.069 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Enable COPRs] *************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/enable_coprs.yml:19
Monday 21 October 2024  18:21:11 -0400 (0:00:00.084)       0:00:05.153 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Make sure required packages are installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 21 October 2024  18:21:11 -0400 (0:00:00.065)       0:00:05.219 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [redhat.rhel_system_roles.storage : Get service facts] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 21 October 2024  18:21:12 -0400 (0:00:00.932)       0:00:06.151 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "cpupower.service": {
                "name": "cpupower.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fcoe.service": {
                "name": "fcoe.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "fwupd-offline-update.service": {
                "name": "fwupd-offline-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "fwupd-refresh.service": {
                "name": "fwupd-refresh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "fwupd.service": {
                "name": "fwupd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "iscsi-shutdown.service": {
                "name": "iscsi-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsi.service": {
                "name": "iscsi.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsid.service": {
                "name": "iscsid.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-activation-early.service": {
                "name": "lvm2-activation-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "ndctl-monitor.service": {
                "name": "ndctl-monitor.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "oddjobd.service": {
                "name": "oddjobd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon.service": {
                "name": "quotaon.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "raid-check.service": {
                "name": "raid-check.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "rbdmap.service": {
                "name": "rbdmap.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rdisc.service": {
                "name": "rdisc.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rhcd.service": {
                "name": "rhcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rhsm-facts.service": {
                "name": "rhsm-facts.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rhsm.service": {
                "name": "rhsm.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rhsmcertd.service": {
                "name": "rhsmcertd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "snapd.seeded.service": {
                "name": "snapd.seeded.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-fsck@dev-disk-by\\x2duuid-7B77\\x2d95E7.service": {
                "name": "systemd-fsck@dev-disk-by\\x2duuid-7B77\\x2d95E7.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "systemd-fsck@dev-xvda2.service": {
                "name": "systemd-fsck@dev-xvda2.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-resume@.service": {
                "name": "systemd-hibernate-resume@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-quotacheck.service": {
                "name": "systemd-quotacheck.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles.service": {
                "name": "systemd-tmpfiles.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "teamd@.service": {
                "name": "teamd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "udisks2.service": {
                "name": "udisks2.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "yppasswdd.service": {
                "name": "yppasswdd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ypserv.service": {
                "name": "ypserv.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ypxfrd.service": {
                "name": "ypxfrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}
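
The service inventory above is the raw return of the role's service-facts pass; the next task boils it down to the list of systemd-cryptsetup units that need masking during the blivet run. A minimal sketch of that filtering, assuming it is derived from ansible_facts.services (the exact expression in main-blivet.yml may differ):

    - name: Set storage_cryptsetup_services
      ansible.builtin.set_fact:
        storage_cryptsetup_services: "{{ ansible_facts.services | dict2items
          | selectattr('key', 'match', '^systemd-cryptsetup@')
          | map(attribute='key') | list }}"

On this host no systemd-cryptsetup@*.service units exist yet, so the list comes back empty, as the next task shows.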

TASK [redhat.rhel_system_roles.storage : Set storage_cryptsetup_services] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 21 October 2024  18:21:14 -0400 (0:00:02.200)       0:00:08.352 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 21 October 2024  18:21:14 -0400 (0:00:00.121)       0:00:08.474 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 21 October 2024  18:21:14 -0400 (0:00:00.045)       0:00:08.519 ******** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}
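
An all-empty blivet result like this is simply the role run with nothing to manage; the test plays invoke it this way to reset state before the real scenarios. A minimal reproduction under default settings (a sketch, not the test's literal play):

    - hosts: all
      roles:
        - role: redhat.rhel_system_roles.storage
          vars:
            storage_pools: []
            storage_volumes: []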

TASK [redhat.rhel_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 21 October 2024  18:21:15 -0400 (0:00:00.604)       0:00:09.124 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if /etc/fstab is present] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 21 October 2024  18:21:15 -0400 (0:00:00.047)       0:00:09.171 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549254.802703,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "d505df7dbf933786a776627e11e33976edc30b5a",
        "ctime": 1729549254.801703,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 822083726,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1729549254.801703,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1123,
        "uid": 0,
        "version": "1383220658",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
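
The stat payload above is the standard return shape of ansible.builtin.stat. A sketch of the task that likely produced it (the register name is an assumption):

    - name: Check if /etc/fstab is present
      ansible.builtin.stat:
        path: /etc/fstab
      register: __storage_fstab_stat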

TASK [redhat.rhel_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 21 October 2024  18:21:15 -0400 (0:00:00.385)       0:00:09.557 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 21 October 2024  18:21:15 -0400 (0:00:00.058)       0:00:09.615 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Show blivet_output] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 21 October 2024  18:21:15 -0400 (0:00:00.046)       0:00:09.662 ******** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [redhat.rhel_system_roles.storage : Set the list of pools for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 21 October 2024  18:21:15 -0400 (0:00:00.045)       0:00:09.707 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Set the list of volumes for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 21 October 2024  18:21:15 -0400 (0:00:00.056)       0:00:09.763 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Remove obsolete mounts] ***************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 21 October 2024  18:21:15 -0400 (0:00:00.029)       0:00:09.793 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 21 October 2024  18:21:15 -0400 (0:00:00.055)       0:00:09.848 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set up new/current mounts] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 21 October 2024  18:21:15 -0400 (0:00:00.034)       0:00:09.883 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage mount ownership/permissions] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 21 October 2024  18:21:15 -0400 (0:00:00.057)       0:00:09.940 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 21 October 2024  18:21:15 -0400 (0:00:00.056)       0:00:09.997 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 21 October 2024  18:21:15 -0400 (0:00:00.032)       0:00:10.029 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [redhat.rhel_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 21 October 2024  18:21:16 -0400 (0:00:00.376)       0:00:10.406 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Update facts] *************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 21 October 2024  18:21:16 -0400 (0:00:00.027)       0:00:10.434 ******** 
ok: [managed-node2]

TASK [Mark tasks to be skipped] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:60
Monday 21 October 2024  18:21:17 -0400 (0:00:00.946)       0:00:11.380 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "service_facts"
        ]
    },
    "changed": false
}

TASK [Get unused disks] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:66
Monday 21 October 2024  18:21:17 -0400 (0:00:00.055)       0:00:11.436 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/get_unused_disk.yml for managed-node2

TASK [Ensure test packages] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/get_unused_disk.yml:2
Monday 21 October 2024  18:21:17 -0400 (0:00:00.051)       0:00:11.488 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Find unused disks in the system] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/get_unused_disk.yml:11
Monday 21 October 2024  18:21:18 -0400 (0:00:00.885)       0:00:12.373 ******** 
ok: [managed-node2] => {
    "changed": false,
    "disks": [
        "sda"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"209715200\" FSTYPE=\"vfat\" LOG_SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"209715200\" FSTYPE=\"vfat\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda3\" TYPE=\"part\" SIZE=\"1073741824\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda3\" TYPE=\"part\" SIZE=\"1073741824\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda4\" TYPE=\"part\" SIZE=\"267149884928\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda4\" TYPE=\"part\" SIZE=\"267149884928\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "filename [xvda4] is a partition",
        "filename [xvda2] is a partition",
        "filename [xvda3] is a partition",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}
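
The "Line:" entries are lsblk key="value" pairs as consumed by the test's disk-discovery helper, which then rejects partitions and any disk that holds them, leaving only sda unused. A rough stand-alone equivalent of the underlying query (assumed; the helper normalizes column names and applies the size/partition filtering itself):

    - name: List block devices the way the helper appears to
      ansible.builtin.command:
        cmd: lsblk -p --pairs --bytes -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
      register: lsblk_out
      changed_when: false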

TASK [Debug why there are no unused disks] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/get_unused_disk.yml:20
Monday 21 October 2024  18:21:18 -0400 (0:00:00.569)       0:00:12.943 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/get_unused_disk.yml:29
Monday 21 October 2024  18:21:18 -0400 (0:00:00.024)       0:00:12.968 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "unused_disks": [
            "sda"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there's not enough unused disks in the system] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/get_unused_disk.yml:34
Monday 21 October 2024  18:21:18 -0400 (0:00:00.034)       0:00:13.003 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Print unused disks] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/get_unused_disk.yml:39
Monday 21 October 2024  18:21:18 -0400 (0:00:00.044)       0:00:13.047 ******** 
ok: [managed-node2] => {
    "unused_disks": [
        "sda"
    ]
}

TASK [Test for correct handling of new encrypted pool w/ no key] ***************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:76
Monday 21 October 2024  18:21:19 -0400 (0:00:00.031)       0:00:13.078 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml for managed-node2

TASK [Store global variable value copy] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:4
Monday 21 October 2024  18:21:19 -0400 (0:00:00.035)       0:00:13.114 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_pools_global": [],
        "storage_safe_mode_global": true,
        "storage_volumes_global": []
    },
    "changed": false
}

TASK [Verify role raises correct error] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:10
Monday 21 October 2024  18:21:19 -0400 (0:00:00.038)       0:00:13.153 ******** 

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:2
Monday 21 October 2024  18:21:19 -0400 (0:00:00.036)       0:00:13.190 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Ensure ansible_facts used by role] ****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 21 October 2024  18:21:19 -0400 (0:00:00.034)       0:00:13.224 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 21 October 2024  18:21:19 -0400 (0:00:00.046)       0:00:13.270 ******** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=RedHat_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/vars/RedHat_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.yml"
}
skipping: [managed-node2] => (item=RedHat_9.5.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.5.yml",
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if system is ostree] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 21 October 2024  18:21:19 -0400 (0:00:00.075)       0:00:13.346 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 21 October 2024  18:21:19 -0400 (0:00:00.034)       0:00:13.380 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:5
Monday 21 October 2024  18:21:19 -0400 (0:00:00.037)       0:00:13.418 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:9
Monday 21 October 2024  18:21:19 -0400 (0:00:00.030)       0:00:13.449 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Include the appropriate provider tasks] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:13
Monday 21 October 2024  18:21:19 -0400 (0:00:00.028)       0:00:13.477 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Make sure blivet is available] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 21 October 2024  18:21:19 -0400 (0:00:00.070)       0:00:13.548 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Show storage_pools] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 21 October 2024  18:21:19 -0400 (0:00:00.045)       0:00:13.593 ******** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
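
Note that this pool spec sets encryption: true but carries neither encryption_key nor encryption_password; that is precisely the misconfiguration this test case is designed to provoke. The variables behind the failing invocation reconstruct to roughly:

    storage_pools:
      - name: foo
        type: lvm
        disks: "{{ unused_disks }}"
        encryption: true
        volumes:
          - name: test1
            size: 4g
            mount_point: /opt/test1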

TASK [redhat.rhel_system_roles.storage : Show storage_volumes] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 21 October 2024  18:21:19 -0400 (0:00:00.043)       0:00:13.637 ******** 
ok: [managed-node2] => {
    "storage_volumes": []
}

TASK [redhat.rhel_system_roles.storage : Get required packages] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 21 October 2024  18:21:19 -0400 (0:00:00.045)       0:00:13.682 ******** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [
        "cryptsetup",
        "lvm2"
    ],
    "pools": [],
    "volumes": []
}

TASK [redhat.rhel_system_roles.storage : Enable copr repositories if needed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 21 October 2024  18:21:20 -0400 (0:00:01.222)       0:00:14.904 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/enable_coprs.yml:2
Monday 21 October 2024  18:21:20 -0400 (0:00:00.121)       0:00:15.025 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Make sure COPR support packages are present] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/enable_coprs.yml:13
Monday 21 October 2024  18:21:21 -0400 (0:00:00.062)       0:00:15.088 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Enable COPRs] *************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/enable_coprs.yml:19
Monday 21 October 2024  18:21:21 -0400 (0:00:00.063)       0:00:15.152 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Make sure required packages are installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 21 October 2024  18:21:21 -0400 (0:00:00.064)       0:00:15.216 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [redhat.rhel_system_roles.storage : Get service facts] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 21 October 2024  18:21:22 -0400 (0:00:00.940)       0:00:16.157 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set storage_cryptsetup_services] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 21 October 2024  18:21:22 -0400 (0:00:00.027)       0:00:16.184 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 21 October 2024  18:21:22 -0400 (0:00:00.054)       0:00:16.239 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 21 October 2024  18:21:22 -0400 (0:00:00.019)       0:00:16.258 ******** 
fatal: [managed-node2]: FAILED! => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

MSG:

encrypted pool 'foo' missing key/password

TASK [redhat.rhel_system_roles.storage : Failed message] ***********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:109
Monday 21 October 2024  18:21:23 -0400 (0:00:01.114)       0:00:17.373 ******** 
fatal: [managed-node2]: FAILED! => {
    "changed": false
}

MSG:

{'changed': False,
 'actions': [],
 'leaves': [],
 'mounts': [],
 'crypts': [],
 'pools': [],
 'volumes': [],
 'packages': [],
 'failed': True,
 'msg': "encrypted pool 'foo' missing key/password",
 'invocation': {
     'module_args': {
         'pools': [{
             'disks': ['sda'],
             'encryption': True,
             'encryption_cipher': None, 'encryption_key': None,
             'encryption_key_size': None, 'encryption_luks_version': None,
             'encryption_password': None, 'encryption_clevis_pin': None,
             'encryption_tang_url': None, 'encryption_tang_thumbprint': None,
             'grow_to_fill': False,
             'name': 'foo',
             'raid_level': None, 'raid_device_count': None,
             'raid_spare_count': None, 'raid_metadata_version': None,
             'raid_chunk_size': None,
             'shared': False,
             'state': 'present',
             'type': 'lvm',
             'volumes': [{
                 'encryption': None, 'encryption_cipher': None,
                 'encryption_key': None, 'encryption_key_size': None,
                 'encryption_luks_version': None, 'encryption_password': None,
                 'fs_create_options': None, 'fs_label': None, 'fs_type': None,
                 'mount_options': None, 'mount_point': '/opt/test1',
                 'mount_user': None, 'mount_group': None, 'mount_mode': None,
                 'name': 'test1',
                 'raid_level': None,
                 'size': '4g',
                 'state': 'present',
                 'type': None,
                 'cached': None, 'cache_devices': [], 'cache_mode': None,
                 'cache_size': None,
                 'compression': None, 'deduplication': None,
                 'raid_disks': [], 'raid_stripe_size': None,
                 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False,
                 'vdo_pool_size': None}]}],
         'volumes': [],
         'use_partitions': None,
         'disklabel_type': None,
         'pool_defaults': {
             'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [],
             'grow_to_fill': False,
             'encryption': False, 'encryption_password': None,
             'encryption_key': None, 'encryption_cipher': None,
             'encryption_key_size': None, 'encryption_luks_version': None,
             'raid_level': None, 'raid_device_count': None,
             'raid_spare_count': None, 'raid_chunk_size': None,
             'raid_metadata_version': None,
             'shared': False},
         'volume_defaults': {
             'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [],
             'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '',
             'fs_overwrite_existing': True,
             'mount_point': '', 'mount_options': 'defaults',
             'mount_check': 0, 'mount_passno': 0,
             'mount_device_identifier': 'uuid',
             'raid_level': None, 'raid_device_count': None,
             'raid_spare_count': None, 'raid_chunk_size': None,
             'raid_stripe_size': None, 'raid_metadata_version': None,
             'encryption': False, 'encryption_password': None,
             'encryption_key': None, 'encryption_cipher': None,
             'encryption_key_size': None, 'encryption_luks_version': None,
             'compression': None, 'deduplication': None,
             'vdo_pool_size': None,
             'thin': None, 'thin_pool_name': None, 'thin_pool_size': None,
             'cached': False, 'cache_size': 0, 'cache_mode': None,
             'cache_devices': []},
         'safe_mode': True,
         'packages_only': False,
         'diskvolume_mkfs_option_map': {}}},
 '_ansible_no_log': False}
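
The module_args dump confirms safe_mode is true and every encryption credential field is null, which is what trips the "missing key/password" check. Supplying either credential satisfies it; an illustrative fix using a vaulted passphrase (the variable name is an assumption):

    storage_pools:
      - name: foo
        type: lvm
        disks: "{{ unused_disks }}"
        encryption: true
        encryption_password: "{{ vault_luks_password }}"
        volumes:
          - name: test1
            size: 4g
            mount_point: /opt/test1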

TASK [redhat.rhel_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 21 October 2024  18:21:23 -0400 (0:00:00.051)       0:00:17.424 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that we failed in the role] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:23
Monday 21 October 2024  18:21:23 -0400 (0:00:00.033)       0:00:17.458 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the blivet output and error message are correct] ******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:28
Monday 21 October 2024  18:21:23 -0400 (0:00:00.041)       0:00:17.500 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
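
verify-role-failed.yml asserts both that the role failed and that the recorded blivet output carries the expected message. A common shape for that kind of negative test, assuming a block/rescue wrapper (a sketch, not the file's verbatim contents):

    - name: Expect the role to fail
      block:
        - name: Run the role with the bad spec
          ansible.builtin.include_role:
            name: redhat.rhel_system_roles.storage
        - name: Fail if the role did not raise an error
          ansible.builtin.fail:
            msg: Role did not raise an error
      rescue:
        - name: Verify the error message
          ansible.builtin.assert:
            that:
              - blivet_output.failed
              - blivet_output.msg is search("missing key/password")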

TASK [Verify correct exception or error message] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:39
Monday 21 October 2024  18:21:23 -0400 (0:00:00.062)       0:00:17.562 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Mark tasks to be skipped] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:93
Monday 21 October 2024  18:21:23 -0400 (0:00:00.042)       0:00:17.605 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "packages_installed",
            "service_facts"
        ]
    },
    "changed": false
}

TASK [Create a key file] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:102
Monday 21 October 2024  18:21:23 -0400 (0:00:00.061)       0:00:17.666 ******** 
ok: [managed-node2] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0600",
    "owner": "root",
    "path": "/tmp/storage_testkt75tsn3lukskey",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 0,
    "state": "file",
    "uid": 0
}
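
The generated path, 0600 mode, and zero size are consistent with ansible.builtin.tempfile; a sketch of the likely task, with the prefix and suffix inferred from the generated name and the register name assumed:

    - name: Create a key file
      ansible.builtin.tempfile:
        state: file
        prefix: storage_test
        suffix: lukskey
      register: storage_test_key_file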

TASK [Write the key into the key file] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:109
Monday 21 October 2024  18:21:24 -0400 (0:00:00.697)       0:00:18.364 ******** 
ok: [managed-node2] => {
    "changed": false,
    "checksum": "7a4dff3752e2baf5617c57eaac048e2b95e8af91",
    "dest": "/tmp/storage_testkt75tsn3lukskey",
    "gid": 0,
    "group": "root",
    "md5sum": "4ac07b967150835c00d0865161e48744",
    "mode": "0600",
    "owner": "root",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 32,
    "src": "/root/.ansible/tmp/ansible-tmp-1729549284.3616312-121429-79554466048490/source",
    "state": "file",
    "uid": 0
}
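
The checksum/src/dest fields mark this as ansible.builtin.copy with inline content; the 32-byte key value itself is never logged. A hedged sketch (both variable names are assumptions):

    - name: Write the key into the key file
      ansible.builtin.copy:
        dest: "{{ storage_test_key_file.path }}"
        content: "{{ luks_test_key }}"
        mode: "0600"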

TASK [Create an encrypted lvm pool using a key file] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:116
Monday 21 October 2024  18:21:25 -0400 (0:00:01.195)       0:00:19.559 ******** 

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:2
Monday 21 October 2024  18:21:25 -0400 (0:00:00.087)       0:00:19.647 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Ensure ansible_facts used by role] ****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 21 October 2024  18:21:25 -0400 (0:00:00.090)       0:00:19.737 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 21 October 2024  18:21:25 -0400 (0:00:00.144)       0:00:19.881 ******** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=RedHat_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/vars/RedHat_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.yml"
}
skipping: [managed-node2] => (item=RedHat_9.5.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.5.yml",
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if system is ostree] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 21 October 2024  18:21:25 -0400 (0:00:00.086)       0:00:19.967 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 21 October 2024  18:21:25 -0400 (0:00:00.041)       0:00:20.008 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:5
Monday 21 October 2024  18:21:25 -0400 (0:00:00.060)       0:00:20.069 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:9
Monday 21 October 2024  18:21:26 -0400 (0:00:00.040)       0:00:20.109 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Include the appropriate provider tasks] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:13
Monday 21 October 2024  18:21:26 -0400 (0:00:00.082)       0:00:20.191 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Make sure blivet is available] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 21 October 2024  18:21:26 -0400 (0:00:00.143)       0:00:20.335 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Show storage_pools] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 21 October 2024  18:21:26 -0400 (0:00:00.069)       0:00:20.404 ******** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_key": "/tmp/storage_testkt75tsn3lukskey",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
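
With encryption_key now pointing at the key file, the same pool spec passes the credential check that failed earlier. A reconstructed invocation (sketch):

    - name: Create an encrypted lvm pool using a key file
      ansible.builtin.include_role:
        name: redhat.rhel_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks: "{{ unused_disks }}"
            encryption: true
            encryption_key: /tmp/storage_testkt75tsn3lukskey
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1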

TASK [redhat.rhel_system_roles.storage : Show storage_volumes] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 21 October 2024  18:21:26 -0400 (0:00:00.068)       0:00:20.472 ******** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined"
}

TASK [redhat.rhel_system_roles.storage : Get required packages] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 21 October 2024  18:21:26 -0400 (0:00:00.057)       0:00:20.529 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Enable copr repositories if needed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 21 October 2024  18:21:26 -0400 (0:00:00.047)       0:00:20.577 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Make sure required packages are installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 21 October 2024  18:21:26 -0400 (0:00:00.045)       0:00:20.623 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Get service facts] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 21 October 2024  18:21:26 -0400 (0:00:00.046)       0:00:20.670 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set storage_cryptsetup_services] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 21 October 2024  18:21:26 -0400 (0:00:00.047)       0:00:20.718 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 21 October 2024  18:21:26 -0400 (0:00:00.100)       0:00:20.818 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 21 October 2024  18:21:26 -0400 (0:00:00.042)       0:00:20.860 ******** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "luks"
        },
        {
            "action": "create device",
            "device": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [
        {
            "backing_device": "/dev/sda",
            "name": "luks-286422e3-4efe-4f44-9bea-3dafa1288253",
            "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "state": "present"
        }
    ],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/xvda3",
        "/dev/xvda4",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "lvm2",
        "dosfstools",
        "cryptsetup"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
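
Read top to bottom, the actions are: LUKS-format /dev/sda, open it as a device-mapper mapping, stamp the mapping as an LVM PV, create VG foo and LV test1, then put xfs on the LV. A post-hoc check one could run against the opened container (illustrative; not part of the test):

    - name: Confirm the LUKS mapping is active
      ansible.builtin.command:
        cmd: cryptsetup status luks-286422e3-4efe-4f44-9bea-3dafa1288253
      changed_when: false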

TASK [redhat.rhel_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 21 October 2024  18:21:37 -0400 (0:00:10.302)       0:00:31.162 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if /etc/fstab is present] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 21 October 2024  18:21:37 -0400 (0:00:00.039)       0:00:31.202 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549254.802703,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "d505df7dbf933786a776627e11e33976edc30b5a",
        "ctime": 1729549254.801703,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 822083726,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1729549254.801703,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1123,
        "uid": 0,
        "version": "1383220658",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [redhat.rhel_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 21 October 2024  18:21:37 -0400 (0:00:00.368)       0:00:31.570 ******** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 21 October 2024  18:21:37 -0400 (0:00:00.495)       0:00:32.066 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Show blivet_output] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 21 October 2024  18:21:38 -0400 (0:00:00.019)       0:00:32.086 ******** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "luks"
            },
            {
                "action": "create device",
                "device": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [
            {
                "backing_device": "/dev/sda",
                "name": "luks-286422e3-4efe-4f44-9bea-3dafa1288253",
                "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "state": "present"
            }
        ],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/xvda3",
            "/dev/xvda4",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "lvm2",
            "dosfstools",
            "cryptsetup"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}
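
The actions list above is the full build order for the stack: format /dev/sda as LUKS, open it as /dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253, format the opened device as an LVM PV, create volume group foo on it, create logical volume test1, and put XFS on it. A minimal sketch of the storage_pools input that would drive such a run, with values read back from the pool dict above (luks_key is a placeholder; the real key is hidden by no_log):

    - name: Create an encrypted LVM pool with one XFS volume (sketch)
      ansible.builtin.include_role:
        name: redhat.rhel_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            encryption: true
            encryption_key: "{{ luks_key }}"  # placeholder; masked by no_log above
            volumes:
              - name: test1
                size: 4g
                fs_type: xfs
                mount_point: /opt/test1
                mount_options: defaults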

TASK [redhat.rhel_system_roles.storage : Set the list of pools for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 21 October 2024  18:21:38 -0400 (0:00:00.029)       0:00:32.116 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Set the list of volumes for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 21 October 2024  18:21:38 -0400 (0:00:00.027)       0:00:32.143 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Remove obsolete mounts] ***************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 21 October 2024  18:21:38 -0400 (0:00:00.024)       0:00:32.168 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 21 October 2024  18:21:38 -0400 (0:00:00.037)       0:00:32.206 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [redhat.rhel_system_roles.storage : Set up new/current mounts] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 21 October 2024  18:21:39 -0400 (0:00:01.033)       0:00:33.239 ******** 
changed: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
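
The return fields here (src, fstab, fstype, opts, dump, passno, backup_file) are what ansible.posix.mount reports, so the role presumably loops that module over each computed mount entry. A standalone equivalent of this single item would look roughly like:

    - name: Mount the new volume (sketch of the equivalent standalone task)
      ansible.posix.mount:
        src: /dev/mapper/foo-test1
        path: /opt/test1
        fstype: xfs
        opts: defaults
        state: mounted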

TASK [redhat.rhel_system_roles.storage : Manage mount ownership/permissions] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 21 October 2024  18:21:39 -0400 (0:00:00.594)       0:00:33.833 ******** 
skipping: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 21 October 2024  18:21:39 -0400 (0:00:00.048)       0:00:33.882 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [redhat.rhel_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 21 October 2024  18:21:40 -0400 (0:00:00.746)       0:00:34.628 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [redhat.rhel_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 21 October 2024  18:21:40 -0400 (0:00:00.383)       0:00:35.012 ******** 
changed: [managed-node2] => (item={'backing_device': '/dev/sda', 'name': 'luks-286422e3-4efe-4f44-9bea-3dafa1288253', 'password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'state': 'present'}) => {
    "ansible_loop_var": "entry",
    "backup": "",
    "changed": true,
    "entry": {
        "backing_device": "/dev/sda",
        "name": "luks-286422e3-4efe-4f44-9bea-3dafa1288253",
        "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
        "state": "present"
    }
}

MSG:

line added and ownership, perms or SE linux context changed
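
Each entry in blivet_output.crypts becomes one /etc/crypttab line of the form "name backing-device key-file", and the message above notes that the file's ownership, permissions, or SELinux context was also adjusted. Functionally this is close to a lineinfile edit such as the sketch below (not the role's actual implementation; /path/to/keyfile stands in for the value masked by no_log):

    - name: Add a crypttab entry by hand (sketch, not what the role runs)
      ansible.builtin.lineinfile:
        path: /etc/crypttab
        line: luks-286422e3-4efe-4f44-9bea-3dafa1288253 /dev/sda /path/to/keyfile
        create: true
        owner: root
        group: root
        mode: "0600"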

TASK [redhat.rhel_system_roles.storage : Update facts] *************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 21 October 2024  18:21:41 -0400 (0:00:00.378)       0:00:35.390 ******** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:131
Monday 21 October 2024  18:21:42 -0400 (0:00:00.936)       0:00:36.327 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:2
Monday 21 October 2024  18:21:42 -0400 (0:00:00.037)       0:00:36.364 ******** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:7
Monday 21 October 2024  18:21:42 -0400 (0:00:00.041)       0:00:36.406 ******** 
skipping: [managed-node2] => {}

TASK [Collect info about the volumes.] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:15
Monday 21 October 2024  18:21:42 -0400 (0:00:00.033)       0:00:36.439 ******** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "9fa36e8e-43bd-438e-a3f7-06a3bb2a115b"
        },
        "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
            "size": "10G",
            "type": "crypt",
            "uuid": "Jv4cIu-bfm7-xCn0-6zyY-zVku-Av9M-5AjZDF"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "286422e3-4efe-4f44-9bea-3dafa1288253"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "vfat",
            "label": "",
            "mountpoint": "/boot/efi",
            "name": "/dev/xvda2",
            "size": "200M",
            "type": "partition",
            "uuid": "7B77-95E7"
        },
        "/dev/xvda3": {
            "fstype": "xfs",
            "label": "boot",
            "mountpoint": "/boot",
            "name": "/dev/xvda3",
            "size": "1G",
            "type": "partition",
            "uuid": "a8cc2a47-4cf2-4d6f-8916-f69641ec5919"
        },
        "/dev/xvda4": {
            "fstype": "xfs",
            "label": "root",
            "mountpoint": "/",
            "name": "/dev/xvda4",
            "size": "248.8G",
            "type": "partition",
            "uuid": "1b4086e3-4d44-4b6e-99dc-43b96b9fea96"
        }
    }
}
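
This survey confirms the three-layer stack: /dev/sda now carries a crypto_LUKS container (UUID 286422e3-4efe-4f44-9bea-3dafa1288253), the opened mapping luks-286422e3-4efe-4f44-9bea-3dafa1288253 is the LVM2_member PV, and /dev/mapper/foo-test1 is the 4G XFS volume mounted at /opt/test1. The remaining disks (sdb through sdi) are untouched, and xvda2 through xvda4 are the host's own EFI, /boot, and root partitions.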

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:20
Monday 21 October 2024  18:21:42 -0400 (0:00:00.564)       0:00:37.004 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003013",
    "end": "2024-10-21 18:21:43.335814",
    "rc": 0,
    "start": "2024-10-21 18:21:43.332801"
}

STDOUT:

UUID=1b4086e3-4d44-4b6e-99dc-43b96b9fea96	/	xfs	defaults	0	0
UUID=a8cc2a47-4cf2-4d6f-8916-f69641ec5919	/boot	xfs	defaults	0	0
UUID=7B77-95E7	/boot/efi	vfat	defaults,uid=0,gid=0,umask=077,shortname=winnt	0	2
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
# system_role:storage
/dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0
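
Everything above the # system_role:storage comment (the root/boot entries and the pre-existing NFS mounts) is left untouched; the role only appends the marker and the single line for the mount it created. The src is the /dev/mapper path rather than a UUID, matching the _mount_id the role computed for this LVM volume earlier.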

TASK [Read the /etc/crypttab file] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:25
Monday 21 October 2024  18:21:43 -0400 (0:00:00.460)       0:00:37.464 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002974",
    "end": "2024-10-21 18:21:43.692997",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-10-21 18:21:43.690023"
}

STDOUT:

luks-286422e3-4efe-4f44-9bea-3dafa1288253 /dev/sda VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
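
One line, three fields: mapping name, backing device, and key file. The third field reads VALUE_SPECIFIED_IN_NO_LOG_PARAMETER only in this output, not in the file itself; the key-file path is handled as no_log data, so Ansible censors it when printing, while /etc/crypttab on disk holds the real path.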

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:34
Monday 21 October 2024  18:21:43 -0400 (0:00:00.363)       0:00:37.828 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:5
Monday 21 October 2024  18:21:43 -0400 (0:00:00.084)       0:00:37.913 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:18
Monday 21 October 2024  18:21:43 -0400 (0:00:00.043)       0:00:37.956 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.028563",
    "end": "2024-10-21 18:21:44.237655",
    "rc": 0,
    "start": "2024-10-21 18:21:44.209092"
}

STDOUT:

        0
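
vgs --noheadings --binary -o shared prints 1 for a shared (lvmlockd-managed) volume group and 0 otherwise, so the 0 here is what the next assertion compares against the pool's shared: false setting.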

TASK [Verify that VG shared value checks out] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:24
Monday 21 October 2024  18:21:44 -0400 (0:00:00.416)       0:00:38.373 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:34
Monday 21 October 2024  18:21:44 -0400 (0:00:00.047)       0:00:38.420 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:2
Monday 21 October 2024  18:21:44 -0400 (0:00:00.075)       0:00:38.495 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:8
Monday 21 October 2024  18:21:44 -0400 (0:00:00.068)       0:00:38.564 ******** 
ok: [managed-node2] => (item=/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
    "pv": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253"
}

TASK [Set pvs lvm length] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:17
Monday 21 October 2024  18:21:45 -0400 (0:00:00.594)       0:00:39.159 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:22
Monday 21 October 2024  18:21:45 -0400 (0:00:00.064)       0:00:39.224 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:27
Monday 21 October 2024  18:21:45 -0400 (0:00:00.060)       0:00:39.285 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:36
Monday 21 October 2024  18:21:45 -0400 (0:00:00.053)       0:00:39.339 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "crypt"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:41
Monday 21 October 2024  18:21:45 -0400 (0:00:00.044)       0:00:39.383 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:46
Monday 21 October 2024  18:21:45 -0400 (0:00:00.036)       0:00:39.420 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:51
Monday 21 October 2024  18:21:45 -0400 (0:00:00.035)       0:00:39.455 ******** 
ok: [managed-node2] => (item=/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:64
Monday 21 October 2024  18:21:45 -0400 (0:00:00.049)       0:00:39.505 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:172893): WARNING **: 18:21:45.726: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

Shared connection to 10.31.8.235 closed.
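
The libbd_nvme warning only means libblockdev's optional NVMe plugin is not installed on the managed node; the probe still exits 0 and prints True, so blivet reports grow-to-fill support and the warning appears harmless in this run.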


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:73
Monday 21 October 2024  18:21:45 -0400 (0:00:00.440)       0:00:39.945 ******** 
skipping: [managed-node2] => (item=/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:83
Monday 21 October 2024  18:21:45 -0400 (0:00:00.111)       0:00:40.056 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:8
Monday 21 October 2024  18:21:46 -0400 (0:00:00.059)       0:00:40.116 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:14
Monday 21 October 2024  18:21:46 -0400 (0:00:00.024)       0:00:40.140 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:19
Monday 21 October 2024  18:21:46 -0400 (0:00:00.023)       0:00:40.163 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:24
Monday 21 October 2024  18:21:46 -0400 (0:00:00.022)       0:00:40.186 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:29
Monday 21 October 2024  18:21:46 -0400 (0:00:00.021)       0:00:40.208 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:37
Monday 21 October 2024  18:21:46 -0400 (0:00:00.022)       0:00:40.231 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:46
Monday 21 October 2024  18:21:46 -0400 (0:00:00.022)       0:00:40.253 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:55
Monday 21 October 2024  18:21:46 -0400 (0:00:00.021)       0:00:40.275 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:64
Monday 21 October 2024  18:21:46 -0400 (0:00:00.021)       0:00:40.297 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:74
Monday 21 October 2024  18:21:46 -0400 (0:00:00.022)       0:00:40.319 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:83
Monday 21 October 2024  18:21:46 -0400 (0:00:00.021)       0:00:40.341 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:86
Monday 21 October 2024  18:21:46 -0400 (0:00:00.026)       0:00:40.368 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Monday 21 October 2024  18:21:46 -0400 (0:00:00.064)       0:00:40.432 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about the LV] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Monday 21 October 2024  18:21:46 -0400 (0:00:00.071)       0:00:40.503 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Monday 21 October 2024  18:21:46 -0400 (0:00:00.035)       0:00:40.539 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Monday 21 October 2024  18:21:46 -0400 (0:00:00.044)       0:00:40.583 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Monday 21 October 2024  18:21:46 -0400 (0:00:00.056)       0:00:40.639 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Monday 21 October 2024  18:21:46 -0400 (0:00:00.055)       0:00:40.694 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Monday 21 October 2024  18:21:46 -0400 (0:00:00.052)       0:00:40.747 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Monday 21 October 2024  18:21:46 -0400 (0:00:00.051)       0:00:40.799 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:89
Monday 21 October 2024  18:21:46 -0400 (0:00:00.035)       0:00:40.835 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-thin.yml:2
Monday 21 October 2024  18:21:46 -0400 (0:00:00.051)       0:00:40.886 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about thinpool] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:8
Monday 21 October 2024  18:21:46 -0400 (0:00:00.041)       0:00:40.928 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:16
Monday 21 October 2024  18:21:46 -0400 (0:00:00.021)       0:00:40.949 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:22
Monday 21 October 2024  18:21:46 -0400 (0:00:00.022)       0:00:40.972 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:26
Monday 21 October 2024  18:21:46 -0400 (0:00:00.021)       0:00:40.994 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:92
Monday 21 October 2024  18:21:46 -0400 (0:00:00.023)       0:00:41.017 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Monday 21 October 2024  18:21:47 -0400 (0:00:00.089)       0:00:41.106 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Monday 21 October 2024  18:21:47 -0400 (0:00:00.103)       0:00:41.210 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml for managed-node2 => (item=/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253)

TASK [Get the backing device path] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:2
Monday 21 October 2024  18:21:47 -0400 (0:00:00.116)       0:00:41.327 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/disk/by-uuid/286422e3-4efe-4f44-9bea-3dafa1288253"
    ],
    "delta": "0:00:00.003054",
    "end": "2024-10-21 18:21:47.645551",
    "rc": 0,
    "start": "2024-10-21 18:21:47.642497"
}

STDOUT:

/dev/sda

TASK [Ensure cryptsetup is present] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:12
Monday 21 October 2024  18:21:47 -0400 (0:00:00.477)       0:00:41.804 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this member] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:18
Monday 21 October 2024  18:21:48 -0400 (0:00:00.891)       0:00:42.696 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cryptsetup",
        "luksDump",
        "/dev/sda"
    ],
    "delta": "0:00:00.006787",
    "end": "2024-10-21 18:21:48.991756",
    "rc": 0,
    "start": "2024-10-21 18:21:48.984969"
}

STDOUT:

LUKS header information
Version:       	2
Epoch:         	3
Metadata area: 	16384 [bytes]
Keyslots area: 	16744448 [bytes]
UUID:          	286422e3-4efe-4f44-9bea-3dafa1288253
Label:         	(no label)
Subsystem:     	(no subsystem)
Flags:       	(no flags)

Data segments:
  0: crypt
	offset: 16777216 [bytes]
	length: (whole device)
	cipher: aes-xts-plain64
	sector: 512 [bytes]

Keyslots:
  0: luks2
	Key:        512 bits
	Priority:   normal
	Cipher:     aes-xts-plain64
	Cipher key: 512 bits
	PBKDF:      argon2id
	Time cost:  4
	Memory:     682664
	Threads:    2
	Salt:       07 fb 75 7c 2a c3 a2 27 f5 e2 f7 27 4c 28 c4 eb 
	            05 20 4d c3 a9 c8 e5 2f ab bd c0 b5 e3 b9 5d dc 
	AF stripes: 4000
	AF hash:    sha256
	Area offset:32768 [bytes]
	Area length:258048 [bytes]
	Digest ID:  0
Tokens:
Digests:
  0: pbkdf2
	Hash:       sha256
	Iterations: 107084
	Salt:       a6 b5 55 1f e9 e7 10 5b d0 de 46 f6 af 55 55 b2 
	            2f ad 93 e8 52 3c 06 d5 68 e4 82 c8 7f 76 6c ca 
	Digest:     15 54 1d 4c 19 e8 48 e0 dc 7a 1a 4b dd ae e2 57 
	            6a 1d cd 00 78 15 64 4e ad e2 98 fd 58 73 9a aa 
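
The dump confirms the defaults blivet chose: LUKS2, aes-xts-plain64 with a 512-bit key, and argon2id key derivation. The three "Check LUKS ..." tasks that follow are skipped because encryption_luks_version, encryption_key_size, and encryption_cipher were all left null in the pool spec; pinning them would both force those values and make the checks run. A sketch of the extra pool keys, with values copied from this header:

    storage_pools:
      - name: foo
        type: lvm
        disks:
          - sda
        encryption: true
        encryption_key: "{{ luks_key }}"  # placeholder; no_log
        encryption_luks_version: luks2
        encryption_key_size: 512
        encryption_cipher: aes-xts-plain64
        # volumes: as in the earlier sketch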

TASK [Check LUKS version] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:26
Monday 21 October 2024  18:21:49 -0400 (0:00:00.427)       0:00:43.123 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:38
Monday 21 October 2024  18:21:49 -0400 (0:00:00.025)       0:00:43.149 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:50
Monday 21 October 2024  18:21:49 -0400 (0:00:00.024)       0:00:43.173 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Monday 21 October 2024  18:21:49 -0400 (0:00:00.024)       0:00:43.198 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 => (item=/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253)

TASK [Set variables used by tests] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Monday 21 October 2024  18:21:49 -0400 (0:00:00.047)       0:00:43.245 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [
            "luks-286422e3-4efe-4f44-9bea-3dafa1288253 /dev/sda VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
        ]
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Monday 21 October 2024  18:21:49 -0400 (0:00:00.045)       0:00:43.291 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Monday 21 October 2024  18:21:49 -0400 (0:00:00.041)       0:00:43.332 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check backing device of crypttab entry] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Monday 21 October 2024  18:21:49 -0400 (0:00:00.044)       0:00:43.376 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Monday 21 October 2024  18:21:49 -0400 (0:00:00.034)       0:00:43.411 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Monday 21 October 2024  18:21:49 -0400 (0:00:00.042)       0:00:43.453 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Monday 21 October 2024  18:21:49 -0400 (0:00:00.024)       0:00:43.477 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:95
Monday 21 October 2024  18:21:49 -0400 (0:00:00.025)       0:00:43.502 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Monday 21 October 2024  18:21:49 -0400 (0:00:00.049)       0:00:43.552 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about VDO deduplication] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Monday 21 October 2024  18:21:49 -0400 (0:00:00.046)       0:00:43.599 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Monday 21 October 2024  18:21:49 -0400 (0:00:00.024)       0:00:43.623 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Monday 21 October 2024  18:21:49 -0400 (0:00:00.023)       0:00:43.646 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Monday 21 October 2024  18:21:49 -0400 (0:00:00.022)       0:00:43.669 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Monday 21 October 2024  18:21:49 -0400 (0:00:00.024)       0:00:43.694 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Monday 21 October 2024  18:21:49 -0400 (0:00:00.023)       0:00:43.718 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Monday 21 October 2024  18:21:49 -0400 (0:00:00.023)       0:00:43.741 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:98
Monday 21 October 2024  18:21:49 -0400 (0:00:00.030)       0:00:43.772 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:6
Monday 21 October 2024  18:21:49 -0400 (0:00:00.079)       0:00:43.852 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:11
Monday 21 October 2024  18:21:49 -0400 (0:00:00.040)       0:00:43.893 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:15
Monday 21 October 2024  18:21:49 -0400 (0:00:00.038)       0:00:43.931 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:25
Monday 21 October 2024  18:21:49 -0400 (0:00:00.089)       0:00:44.020 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:34
Monday 21 October 2024  18:21:50 -0400 (0:00:00.052)       0:00:44.073 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:44
Monday 21 October 2024  18:21:50 -0400 (0:00:00.043)       0:00:44.117 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:101
Monday 21 October 2024  18:21:50 -0400 (0:00:00.080)       0:00:44.198 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Monday 21 October 2024  18:21:50 -0400 (0:00:00.065)       0:00:44.263 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Set storage volume test variables] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:2
Monday 21 October 2024  18:21:50 -0400 (0:00:00.070)       0:00:44.333 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:19
Monday 21 October 2024  18:21:50 -0400 (0:00:00.077)       0:00:44.411 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
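
The eight include lines above are driven by a single loop: test-verify-volume.yml iterates over the `_storage_volume_tests` list set in the previous task and includes one test-verify-volume-<subset>.yml file per item. (The unrendered "{{ storage_test_volume_subset }}" in the task banner below is normal: the loop variable is not bound yet when the name is printed.) A minimal sketch of that dispatch, assuming the loop variable is indeed named storage_test_volume_subset:

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset  # assumed name, matching the banner above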

TASK [Get expected mount device based on device type] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:7
Monday 21 October 2024  18:21:50 -0400 (0:00:00.228)       0:00:44.640 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:11
Monday 21 October 2024  18:21:50 -0400 (0:00:00.055)       0:00:44.695 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:19
Monday 21 October 2024  18:21:50 -0400 (0:00:00.067)       0:00:44.763 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:28
Monday 21 October 2024  18:21:50 -0400 (0:00:00.028)       0:00:44.791 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:36
Monday 21 October 2024  18:21:50 -0400 (0:00:00.032)       0:00:44.824 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:42
Monday 21 October 2024  18:21:50 -0400 (0:00:00.022)       0:00:44.847 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:48
Monday 21 October 2024  18:21:50 -0400 (0:00:00.023)       0:00:44.870 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:57
Monday 21 October 2024  18:21:50 -0400 (0:00:00.027)       0:00:44.898 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:63
Monday 21 October 2024  18:21:50 -0400 (0:00:00.029)       0:00:44.927 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:69
Monday 21 October 2024  18:21:50 -0400 (0:00:00.035)       0:00:44.963 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:79
Monday 21 October 2024  18:21:50 -0400 (0:00:00.028)       0:00:44.992 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Monday 21 October 2024  18:21:50 -0400 (0:00:00.049)       0:00:45.041 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "/dev/mapper/foo-test1 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
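
These facts are the raw material for the fstab assertions: the *_matches lists hold the /etc/fstab line fragments that matched the device id, mount point, and mount options, and the *_expected_* counters say how many matches there must be; exactly one of each here, since the volume should appear in fstab once. The checks that follow then compare list length against expected count, roughly:

    - name: Verify that the device identifier appears in /etc/fstab
      ansible.builtin.assert:
        that:
          # a sketch of the comparison; the real task file may phrase it differently
          - storage_test_fstab_expected_id_matches | int == storage_test_fstab_id_matches | length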

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Monday 21 October 2024  18:21:51 -0400 (0:00:00.099)       0:00:45.141 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Monday 21 October 2024  18:21:51 -0400 (0:00:00.070)       0:00:45.211 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Monday 21 October 2024  18:21:51 -0400 (0:00:00.056)       0:00:45.268 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Monday 21 October 2024  18:21:51 -0400 (0:00:00.041)       0:00:45.309 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Monday 21 October 2024  18:21:51 -0400 (0:00:00.027)       0:00:45.337 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:6
Monday 21 October 2024  18:21:51 -0400 (0:00:00.025)       0:00:45.362 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:14
Monday 21 October 2024  18:21:51 -0400 (0:00:00.046)       0:00:45.409 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:3
Monday 21 October 2024  18:21:51 -0400 (0:00:00.067)       0:00:45.476 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549301.2709112,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1729549296.976892,
        "dev": 5,
        "device_type": 64769,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 7908,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1729549296.976892,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
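
The stat result looks contradictory at first glance: "mimetype": "inode/symlink" together with "isblk": true and "islnk": false. That is consistent with /dev/mapper/foo-test1 being a symlink to the kernel node /dev/dm-1: the type and mode bits come from the resolved block device while the mimetype probe inspected the link itself. The check presumably stats the mapped path with follow enabled, along these lines:

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: /dev/mapper/foo-test1
        follow: true  # resolve the /dev/mapper symlink to the dm-N node
      register: storage_test_dev  # assumed register name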

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:9
Monday 21 October 2024  18:21:51 -0400 (0:00:00.401)       0:00:45.878 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:16
Monday 21 October 2024  18:21:51 -0400 (0:00:00.035)       0:00:45.913 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:23
Monday 21 October 2024  18:21:51 -0400 (0:00:00.034)       0:00:45.948 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:29
Monday 21 October 2024  18:21:51 -0400 (0:00:00.044)       0:00:45.993 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:33
Monday 21 October 2024  18:21:51 -0400 (0:00:00.041)       0:00:46.035 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:38
Monday 21 October 2024  18:21:52 -0400 (0:00:00.090)       0:00:46.126 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Monday 21 October 2024  18:21:52 -0400 (0:00:00.041)       0:00:46.167 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Monday 21 October 2024  18:21:52 -0400 (0:00:00.029)       0:00:46.197 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Monday 21 October 2024  18:21:52 -0400 (0:00:00.802)       0:00:46.999 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Monday 21 October 2024  18:21:52 -0400 (0:00:00.023)       0:00:47.022 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Monday 21 October 2024  18:21:52 -0400 (0:00:00.022)       0:00:47.045 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Monday 21 October 2024  18:21:53 -0400 (0:00:00.042)       0:00:47.087 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Monday 21 October 2024  18:21:53 -0400 (0:00:00.024)       0:00:47.112 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Monday 21 October 2024  18:21:53 -0400 (0:00:00.045)       0:00:47.158 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Monday 21 October 2024  18:21:53 -0400 (0:00:00.042)       0:00:47.200 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Monday 21 October 2024  18:21:53 -0400 (0:00:00.041)       0:00:47.242 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Monday 21 October 2024  18:21:53 -0400 (0:00:00.044)       0:00:47.287 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Monday 21 October 2024  18:21:53 -0400 (0:00:00.086)       0:00:47.374 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
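
For this unencrypted volume the expectations set two tasks earlier are "0" crypttab entries and the placeholder key file "-", so the passing assertion amounts to an empty-list check. A minimal sketch, assuming the variable names shown above:

    - name: Check for /etc/crypttab entry
      ansible.builtin.assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int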

TASK [Validate the format of the crypttab entry] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Monday 21 October 2024  18:21:53 -0400 (0:00:00.094)       0:00:47.468 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Monday 21 October 2024  18:21:53 -0400 (0:00:00.100)       0:00:47.568 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Monday 21 October 2024  18:21:53 -0400 (0:00:00.064)       0:00:47.633 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Monday 21 October 2024  18:21:53 -0400 (0:00:00.070)       0:00:47.704 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:8
Monday 21 October 2024  18:21:53 -0400 (0:00:00.053)       0:00:47.757 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:14
Monday 21 October 2024  18:21:53 -0400 (0:00:00.048)       0:00:47.805 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:19
Monday 21 October 2024  18:21:53 -0400 (0:00:00.039)       0:00:47.844 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:24
Monday 21 October 2024  18:21:53 -0400 (0:00:00.037)       0:00:47.881 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:29
Monday 21 October 2024  18:21:53 -0400 (0:00:00.039)       0:00:47.921 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:37
Monday 21 October 2024  18:21:53 -0400 (0:00:00.030)       0:00:47.951 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:46
Monday 21 October 2024  18:21:53 -0400 (0:00:00.028)       0:00:47.980 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:54
Monday 21 October 2024  18:21:53 -0400 (0:00:00.028)       0:00:48.009 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:62
Monday 21 October 2024  18:21:53 -0400 (0:00:00.022)       0:00:48.032 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:70
Monday 21 October 2024  18:21:53 -0400 (0:00:00.024)       0:00:48.056 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:3
Monday 21 October 2024  18:21:54 -0400 (0:00:00.022)       0:00:48.078 ******** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:11
Monday 21 October 2024  18:21:54 -0400 (0:00:00.640)       0:00:48.719 ******** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
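
Actual and requested size agree because "4g" converts exactly: 4 GiB = 4 * 1024^3 = 4294967296 bytes. The same conversion is available as a stock Jinja filter, for example:

    - name: Convert a human-readable size to bytes (illustrative)
      ansible.builtin.debug:
        msg: "{{ '4g' | human_to_bytes }}"  # prints 4294967296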

TASK [Establish base value for expected size] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:20
Monday 21 October 2024  18:21:55 -0400 (0:00:00.444)       0:00:49.163 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:28
Monday 21 October 2024  18:21:55 -0400 (0:00:00.071)       0:00:49.234 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:32
Monday 21 October 2024  18:21:55 -0400 (0:00:00.087)       0:00:49.322 ******** 
ok: [managed-node2] => {
    "bytes": 10715943403,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:46
Monday 21 October 2024  18:21:55 -0400 (0:00:00.407)       0:00:49.729 ******** 
skipping: [managed-node2] => {}

TASK [Show test blockinfo] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:50
Monday 21 October 2024  18:21:55 -0400 (0:00:00.039)       0:00:49.769 ******** 
skipping: [managed-node2] => {}

TASK [Show test pool size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:54
Monday 21 October 2024  18:21:55 -0400 (0:00:00.041)       0:00:49.811 ******** 
skipping: [managed-node2] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:58
Monday 21 October 2024  18:21:55 -0400 (0:00:00.058)       0:00:49.869 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:67
Monday 21 October 2024  18:21:55 -0400 (0:00:00.060)       0:00:49.929 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:71
Monday 21 October 2024  18:21:55 -0400 (0:00:00.028)       0:00:49.958 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:76
Monday 21 October 2024  18:21:55 -0400 (0:00:00.026)       0:00:49.985 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:82
Monday 21 October 2024  18:21:55 -0400 (0:00:00.027)       0:00:50.012 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:86
Monday 21 October 2024  18:21:55 -0400 (0:00:00.034)       0:00:50.047 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:91
Monday 21 October 2024  18:21:56 -0400 (0:00:00.026)       0:00:50.074 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:96
Monday 21 October 2024  18:21:56 -0400 (0:00:00.024)       0:00:50.098 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:101
Monday 21 October 2024  18:21:56 -0400 (0:00:00.023)       0:00:50.122 ******** 
skipping: [managed-node2] => {}

TASK [Show volume thin pool size] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:105
Monday 21 October 2024  18:21:56 -0400 (0:00:00.021)       0:00:50.144 ******** 
skipping: [managed-node2] => {}

TASK [Show test volume size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:109
Monday 21 October 2024  18:21:56 -0400 (0:00:00.021)       0:00:50.165 ******** 
skipping: [managed-node2] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:113
Monday 21 October 2024  18:21:56 -0400 (0:00:00.021)       0:00:50.187 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:120
Monday 21 October 2024  18:21:56 -0400 (0:00:00.024)       0:00:50.211 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:127
Monday 21 October 2024  18:21:56 -0400 (0:00:00.022)       0:00:50.234 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:131
Monday 21 October 2024  18:21:56 -0400 (0:00:00.022)       0:00:50.256 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:137
Monday 21 October 2024  18:21:56 -0400 (0:00:00.022)       0:00:50.278 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:143
Monday 21 October 2024  18:21:56 -0400 (0:00:00.025)       0:00:50.304 ******** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:147
Monday 21 October 2024  18:21:56 -0400 (0:00:00.037)       0:00:50.341 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:151
Monday 21 October 2024  18:21:56 -0400 (0:00:00.026)       0:00:50.367 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Get information about the LV] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:5
Monday 21 October 2024  18:21:56 -0400 (0:00:00.045)       0:00:50.412 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.031172",
    "end": "2024-10-21 18:21:56.669657",
    "rc": 0,
    "start": "2024-10-21 18:21:56.638485"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
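
The lvs switches make this output machine-parseable: --noheadings drops the header row, --nameprefixes emits LVM2_<FIELD>=value pairs, --units=b --nosuffix reports plain byte counts, --unquoted omits quoting, and -o selects the columns. LVM2_SEGTYPE=linear (rather than cache) is the field the next task extracts; one plausible way to pull it out of the registered result:

    - name: Set LV segment type
      ansible.builtin.set_fact:
        # lv_info is an assumed register name for the lvs command above
        storage_test_lv_segtype: "{{ lv_info.stdout | regex_findall('LVM2_SEGTYPE=(\\S+)') }}"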

TASK [Set LV segment type] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:13
Monday 21 October 2024  18:21:56 -0400 (0:00:00.398)       0:00:50.811 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:17
Monday 21 October 2024  18:21:56 -0400 (0:00:00.049)       0:00:50.861 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:24
Monday 21 October 2024  18:21:56 -0400 (0:00:00.052)       0:00:50.913 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:31
Monday 21 October 2024  18:21:56 -0400 (0:00:00.041)       0:00:50.955 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:37
Monday 21 October 2024  18:21:56 -0400 (0:00:00.037)       0:00:50.993 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:42
Monday 21 October 2024  18:21:56 -0400 (0:00:00.035)       0:00:51.029 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:25
Monday 21 October 2024  18:21:56 -0400 (0:00:00.036)       0:00:51.065 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:44
Monday 21 October 2024  18:21:57 -0400 (0:00:00.025)       0:00:51.091 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:54
Monday 21 October 2024  18:21:57 -0400 (0:00:00.022)       0:00:51.113 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Remove the key file] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:134
Monday 21 October 2024  18:21:57 -0400 (0:00:00.029)       0:00:51.142 ******** 
ok: [managed-node2] => {
    "changed": false,
    "path": "/tmp/storage_testkt75tsn3lukskey",
    "state": "absent"
}

TASK [Create a file] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/create-test-file.yml:12
Monday 21 October 2024  18:21:57 -0400 (0:00:00.677)       0:00:51.820 ******** 
changed: [managed-node2] => {
    "changed": true,
    "dest": "/opt/test1/quux",
    "gid": 0,
    "group": "root",
    "mode": "0644",
    "owner": "root",
    "secontext": "unconfined_u:object_r:unlabeled_t:s0",
    "size": 0,
    "state": "file",
    "uid": 0
}

TASK [Test for correct handling of safe_mode] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:143
Monday 21 October 2024  18:21:58 -0400 (0:00:00.419)       0:00:52.240 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml for managed-node2

TASK [Store global variable value copy] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:4
Monday 21 October 2024  18:21:58 -0400 (0:00:00.065)       0:00:52.305 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_pools_global": [],
        "storage_safe_mode_global": true,
        "storage_volumes_global": []
    },
    "changed": false
}
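
verify-role-failed.yml snapshots the caller's storage_* inputs and then runs the role expecting it to fail; the usual Ansible idiom for "this must fail with a particular message" is a block/rescue around the role call, with the error inspected via ansible_failed_result. A rough sketch under that assumption (the real harness may differ in detail):

    - name: Verify role raises correct error
      block:
        - name: Run the role, expecting it to fail
          ansible.builtin.include_role:
            name: redhat.rhel_system_roles.storage
        - name: Only reached if the role unexpectedly succeeded
          ansible.builtin.fail:
            msg: Role was expected to fail but did not
      rescue:
        - name: Check the failure message
          ansible.builtin.assert:
            that:
              - ansible_failed_result.msg is search(__storage_failed_regex)  # assumed variable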

TASK [Verify role raises correct error] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:10
Monday 21 October 2024  18:21:58 -0400 (0:00:00.071)       0:00:52.377 ******** 

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:2
Monday 21 October 2024  18:21:58 -0400 (0:00:00.085)       0:00:52.463 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Ensure ansible_facts used by role] ****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 21 October 2024  18:21:58 -0400 (0:00:00.067)       0:00:52.531 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 21 October 2024  18:21:58 -0400 (0:00:00.070)       0:00:52.601 ******** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=RedHat_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/vars/RedHat_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.yml"
}
skipping: [managed-node2] => (item=RedHat_9.5.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.5.yml",
    "skip_reason": "Conditional result was False"
}
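
The candidate vars files run from generic to specific: RedHat.yml, RedHat_9.yml, RedHat_9.5.yml. Only RedHat_9.yml was found under roles/storage/vars/, so it alone is loaded, supplying blivet_package_list (note the embedded Jinja conditional that substitutes libblockdev-s390 on s390x hosts). A rough sketch of the lookup pattern, assuming candidates that do not exist on the controller are simply skipped:

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ role_path }}/vars/{{ item }}"
      loop:
        - RedHat.yml
        - RedHat_9.yml
        - RedHat_9.5.yml
      # skip candidates that are not shipped with the role
      when: (role_path ~ '/vars/' ~ item) is file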

TASK [redhat.rhel_system_roles.storage : Check if system is ostree] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 21 October 2024  18:21:58 -0400 (0:00:00.095)       0:00:52.697 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 21 October 2024  18:21:58 -0400 (0:00:00.048)       0:00:52.745 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:5
Monday 21 October 2024  18:21:58 -0400 (0:00:00.049)       0:00:52.795 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:9
Monday 21 October 2024  18:21:58 -0400 (0:00:00.043)       0:00:52.838 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Include the appropriate provider tasks] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:13
Monday 21 October 2024  18:21:58 -0400 (0:00:00.041)       0:00:52.879 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Make sure blivet is available] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 21 October 2024  18:21:58 -0400 (0:00:00.103)       0:00:52.983 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Show storage_pools] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 21 October 2024  18:21:58 -0400 (0:00:00.049)       0:00:53.033 ******** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}

TASK [redhat.rhel_system_roles.storage : Show storage_volumes] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 21 October 2024  18:21:59 -0400 (0:00:00.052)       0:00:53.085 ******** 
ok: [managed-node2] => {
    "storage_volumes": []
}

TASK [redhat.rhel_system_roles.storage : Get required packages] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 21 October 2024  18:21:59 -0400 (0:00:00.051)       0:00:53.136 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Enable copr repositories if needed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 21 October 2024  18:21:59 -0400 (0:00:00.050)       0:00:53.187 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Make sure required packages are installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 21 October 2024  18:21:59 -0400 (0:00:00.051)       0:00:53.238 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Get service facts] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 21 October 2024  18:21:59 -0400 (0:00:00.052)       0:00:53.291 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set storage_cryptsetup_services] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 21 October 2024  18:21:59 -0400 (0:00:00.051)       0:00:53.342 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 21 October 2024  18:21:59 -0400 (0:00:00.101)       0:00:53.443 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 21 October 2024  18:21:59 -0400 (0:00:00.044)       0:00:53.488 ******** 
fatal: [managed-node2]: FAILED! => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

MSG:

cannot remove and recreate existing pool 'foo' in safe mode

TASK [redhat.rhel_system_roles.storage : Failed message] ***********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:109
Monday 21 October 2024  18:22:01 -0400 (0:00:01.739)       0:00:55.227 ******** 
fatal: [managed-node2]: FAILED! => {
    "changed": false
}

MSG:

{'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True,
 'msg': "cannot remove and recreate existing pool 'foo' in safe mode",
 'invocation': {'module_args': {
    'pools': [{'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0,
        'encryption_luks_version': 'luks2', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None,
        'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None,
        'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False,
        'state': 'present', 'type': 'lvm',
        'volumes': [{'encryption': None, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None,
            'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': None, 'fs_label': None, 'fs_type': None,
            'mount_options': None, 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None,
            'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': None, 'cached': None, 'cache_devices': [],
            'cache_mode': None, 'cache_size': None, 'compression': None, 'deduplication': None, 'raid_disks': [],
            'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None}]}],
    'volumes': [],
    'use_partitions': None,
    'disklabel_type': None,
    'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'grow_to_fill': False, 'encryption': False,
        'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None,
        'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None,
        'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False},
    'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '',
        'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0,
        'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None,
        'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False,
        'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None,
        'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None,
        'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []},
    'safe_mode': True,
    'packages_only': False,
    'diskvolume_mkfs_option_map': {}}},
 '_ansible_no_log': False}
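
Note the 'safe_mode': True entry in the module_args above: blivet refuses to destroy and re-create an existing pool while safe mode is enabled, which is exactly the failure this test provokes on purpose. The role exposes the setting as the storage_safe_mode variable. A minimal sketch of an invocation that would permit the destructive re-creation, assuming the same pool spec:

    - name: Permit destructive re-creation of pool 'foo'
      ansible.builtin.include_role:
        name: redhat.rhel_system_roles.storage
      vars:
        storage_safe_mode: false   # role default is true; false allows destroy/recreate
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            encryption: false
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1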

TASK [redhat.rhel_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 21 October 2024  18:22:01 -0400 (0:00:00.059)       0:00:55.287 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that we failed in the role] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:23
Monday 21 October 2024  18:22:01 -0400 (0:00:00.038)       0:00:55.326 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the blivet output and error message are correct] ******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:28
Monday 21 October 2024  18:22:01 -0400 (0:00:00.045)       0:00:55.372 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify correct exception or error message] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:39
Monday 21 October 2024  18:22:01 -0400 (0:00:00.068)       0:00:55.440 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Stat the file] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-data-preservation.yml:11
Monday 21 October 2024  18:22:01 -0400 (0:00:00.056)       0:00:55.497 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549318.0939867,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1729549318.0939867,
        "dev": 64769,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 131,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0644",
        "mtime": 1729549318.0939867,
        "nlink": 1,
        "path": "/opt/test1/quux",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "2057274523",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
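
The checksum da39a3ee5e6b4b0d3255bfef95601890afd80709 is the SHA-1 of zero bytes, so the empty test file created before the failed run is still intact: the safe-mode failure destroyed nothing. The stat above can be reproduced with tasks along these lines (a sketch; the actual verify-data-preservation.yml may differ):

    - name: Stat the preserved test file
      ansible.builtin.stat:
        path: /opt/test1/quux
        checksum_algorithm: sha1
      register: __test_file

    - name: Assert the file survived
      ansible.builtin.assert:
        that:
          - __test_file.stat.exists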

TASK [Assert file presence] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-data-preservation.yml:16
Monday 21 October 2024  18:22:01 -0400 (0:00:00.451)       0:00:55.949 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Remove the encryption layer] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:165
Monday 21 October 2024  18:22:01 -0400 (0:00:00.052)       0:00:56.001 ******** 

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:2
Monday 21 October 2024  18:22:02 -0400 (0:00:00.079)       0:00:56.081 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Ensure ansible_facts used by role] ****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 21 October 2024  18:22:02 -0400 (0:00:00.056)       0:00:56.137 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 21 October 2024  18:22:02 -0400 (0:00:00.060)       0:00:56.198 ******** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=RedHat_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/vars/RedHat_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.yml"
}
skipping: [managed-node2] => (item=RedHat_9.5.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.5.yml",
    "skip_reason": "Conditional result was False"
}
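
The last entry in blivet_package_list is an unrendered Jinja2 expression; it is evaluated when the list is consumed, so s390x hosts resolve it to libblockdev-s390 and everything else to libblockdev. Installing the resolved list would look roughly like this (a sketch; the role's own install task is skipped above by its conditional):

    - name: Install the blivet tooling resolved from RedHat_9.yml
      ansible.builtin.package:
        name: "{{ blivet_package_list }}"
        state: present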

TASK [redhat.rhel_system_roles.storage : Check if system is ostree] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 21 October 2024  18:22:02 -0400 (0:00:00.119)       0:00:56.317 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 21 October 2024  18:22:02 -0400 (0:00:00.067)       0:00:56.385 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:5
Monday 21 October 2024  18:22:02 -0400 (0:00:00.057)       0:00:56.442 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:9
Monday 21 October 2024  18:22:02 -0400 (0:00:00.031)       0:00:56.473 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Include the appropriate provider tasks] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:13
Monday 21 October 2024  18:22:02 -0400 (0:00:00.032)       0:00:56.506 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Make sure blivet is available] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 21 October 2024  18:22:02 -0400 (0:00:00.065)       0:00:56.571 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Show storage_pools] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 21 October 2024  18:22:02 -0400 (0:00:00.030)       0:00:56.602 ******** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}

TASK [redhat.rhel_system_roles.storage : Show storage_volumes] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 21 October 2024  18:22:02 -0400 (0:00:00.029)       0:00:56.632 ******** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined"
}
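
The "VARIABLE IS NOT DEFINED!" text is the debug module's normal rendering of an undefined optional variable, not an error: this invocation evidently supplies only storage_pools, so storage_volumes is undefined and the task still reports ok. Passing an explicit empty list would silence it (a sketch, assuming the same invocation):

    - name: Invoke the storage role with an explicit empty volume list
      ansible.builtin.include_role:
        name: redhat.rhel_system_roles.storage
      vars:
        storage_volumes: []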

TASK [redhat.rhel_system_roles.storage : Get required packages] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 21 October 2024  18:22:02 -0400 (0:00:00.041)       0:00:56.673 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Enable copr repositories if needed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 21 October 2024  18:22:02 -0400 (0:00:00.032)       0:00:56.706 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Make sure required packages are installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 21 October 2024  18:22:02 -0400 (0:00:00.035)       0:00:56.741 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Get service facts] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 21 October 2024  18:22:02 -0400 (0:00:00.044)       0:00:56.786 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set storage_cryptsetup_services] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 21 October 2024  18:22:02 -0400 (0:00:00.043)       0:00:56.830 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 21 October 2024  18:22:02 -0400 (0:00:00.073)       0:00:56.904 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 21 October 2024  18:22:02 -0400 (0:00:00.025)       0:00:56.929 ******** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "luks"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [
        {
            "backing_device": "/dev/sda",
            "name": "luks-286422e3-4efe-4f44-9bea-3dafa1288253",
            "password": "-",
            "state": "absent"
        }
    ],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/xvda3",
        "/dev/xvda4",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "lvm2",
        "dosfstools"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks2",
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
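
The actions array above records the whole conversion in order: destroy the XFS filesystem and LV, remove the VG, dissolve the LUKS container on /dev/sda, then rebuild the same LVM stack directly on the raw disk. For orientation only, a rough manual equivalent (device and LUKS names taken from the log; blivet performs these steps through its own API, not via these commands):

    - name: Manually tear down the LUKS-backed pool and rebuild it unencrypted
      ansible.builtin.shell: |
        umount /opt/test1
        lvremove -y foo/test1
        vgremove -y foo
        pvremove -y /dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253
        cryptsetup close luks-286422e3-4efe-4f44-9bea-3dafa1288253
        wipefs -a /dev/sda
        pvcreate /dev/sda
        vgcreate foo /dev/sda
        lvcreate -y -L 4g -n test1 foo
        mkfs.xfs /dev/mapper/foo-test1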

TASK [redhat.rhel_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 21 October 2024  18:22:05 -0400 (0:00:02.778)       0:00:59.708 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if /etc/fstab is present] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 21 October 2024  18:22:05 -0400 (0:00:00.072)       0:00:59.780 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549299.6759043,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "0d495c4508d8f74dc9d1b472b6e1b1d638f6ad31",
        "ctime": 1729549299.674904,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 822083726,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1729549299.674904,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1173,
        "uid": 0,
        "version": "1383220658",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [redhat.rhel_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 21 October 2024  18:22:06 -0400 (0:00:00.495)       0:01:00.275 ******** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 21 October 2024  18:22:06 -0400 (0:00:00.547)       0:01:00.823 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Show blivet_output] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 21 October 2024  18:22:06 -0400 (0:00:00.045)       0:01:00.869 ******** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/luks-286422e3-4efe-4f44-9bea-3dafa1288253",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "luks"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [
            {
                "backing_device": "/dev/sda",
                "name": "luks-286422e3-4efe-4f44-9bea-3dafa1288253",
                "password": "-",
                "state": "absent"
            }
        ],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/xvda3",
            "/dev/xvda4",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "lvm2",
            "dosfstools"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks2",
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [redhat.rhel_system_roles.storage : Set the list of pools for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 21 October 2024  18:22:06 -0400 (0:00:00.066)       0:01:00.935 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks2",
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Set the list of volumes for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 21 October 2024  18:22:06 -0400 (0:00:00.048)       0:01:00.983 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Remove obsolete mounts] ***************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 21 October 2024  18:22:06 -0400 (0:00:00.043)       0:01:01.027 ******** 
changed: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 21 October 2024  18:22:07 -0400 (0:00:00.546)       0:01:01.574 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [redhat.rhel_system_roles.storage : Set up new/current mounts] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 21 October 2024  18:22:08 -0400 (0:00:00.779)       0:01:02.353 ******** 
changed: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}

TASK [redhat.rhel_system_roles.storage : Manage mount ownership/permissions] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 21 October 2024  18:22:08 -0400 (0:00:00.426)       0:01:02.780 ******** 
skipping: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 21 October 2024  18:22:08 -0400 (0:00:00.065)       0:01:02.846 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [redhat.rhel_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 21 October 2024  18:22:09 -0400 (0:00:00.713)       0:01:03.560 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549301.2779114,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "3d62f2c6621437fb9e6018580e880f1668026c95",
        "ctime": 1729549301.2539113,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 922747142,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0600",
        "mtime": 1729549301.2489111,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 87,
        "uid": 0,
        "version": "1884942756",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [redhat.rhel_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 21 October 2024  18:22:09 -0400 (0:00:00.427)       0:01:03.987 ******** 
changed: [managed-node2] => (item={'backing_device': '/dev/sda', 'name': 'luks-286422e3-4efe-4f44-9bea-3dafa1288253', 'password': '-', 'state': 'absent'}) => {
    "ansible_loop_var": "entry",
    "backup": "",
    "changed": true,
    "entry": {
        "backing_device": "/dev/sda",
        "name": "luks-286422e3-4efe-4f44-9bea-3dafa1288253",
        "password": "-",
        "state": "absent"
    },
    "found": 1
}

MSG:

1 line(s) removed
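
The removed line was the pool's /etc/crypttab entry. Given the crypts payload above (name, backing device /dev/sda, password '-'), it presumably looked like this, following the standard crypttab layout of name, backing device, key file:

    luks-286422e3-4efe-4f44-9bea-3dafa1288253 /dev/sda -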

TASK [redhat.rhel_system_roles.storage : Update facts] *************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 21 October 2024  18:22:10 -0400 (0:00:00.503)       0:01:04.491 ******** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:181
Monday 21 October 2024  18:22:11 -0400 (0:00:01.072)       0:01:05.563 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:2
Monday 21 October 2024  18:22:11 -0400 (0:00:00.091)       0:01:05.654 ******** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks2",
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:7
Monday 21 October 2024  18:22:11 -0400 (0:00:00.050)       0:01:05.704 ******** 
skipping: [managed-node2] => {}

TASK [Collect info about the volumes.] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:15
Monday 21 October 2024  18:22:11 -0400 (0:00:00.035)       0:01:05.740 ******** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "30b65e6c-2005-4d38-b013-286bed4cb3ec"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "1IR964-5f2Y-2HRQ-eCqB-jD5s-HUwP-AP7QOT"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "vfat",
            "label": "",
            "mountpoint": "/boot/efi",
            "name": "/dev/xvda2",
            "size": "200M",
            "type": "partition",
            "uuid": "7B77-95E7"
        },
        "/dev/xvda3": {
            "fstype": "xfs",
            "label": "boot",
            "mountpoint": "/boot",
            "name": "/dev/xvda3",
            "size": "1G",
            "type": "partition",
            "uuid": "a8cc2a47-4cf2-4d6f-8916-f69641ec5919"
        },
        "/dev/xvda4": {
            "fstype": "xfs",
            "label": "root",
            "mountpoint": "/",
            "name": "/dev/xvda4",
            "size": "248.8G",
            "type": "partition",
            "uuid": "1b4086e3-4d44-4b6e-99dc-43b96b9fea96"
        }
    }
}
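
The info mapping mirrors what lsblk reports per device: the old LUKS holder is gone, /dev/sda is a plain LVM2_member, and foo-test1 is mounted on /opt/test1. Roughly the same view can be had by hand (a sketch; the test gathers this through its own helper, not lsblk):

    - name: Inspect block devices the way the verification step does
      ansible.builtin.command: lsblk -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: __lsblk
      changed_when: false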

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:20
Monday 21 October 2024  18:22:12 -0400 (0:00:00.368)       0:01:06.109 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003019",
    "end": "2024-10-21 18:22:12.346391",
    "rc": 0,
    "start": "2024-10-21 18:22:12.343372"
}

STDOUT:

UUID=1b4086e3-4d44-4b6e-99dc-43b96b9fea96	/	xfs	defaults	0	0
UUID=a8cc2a47-4cf2-4d6f-8916-f69641ec5919	/boot	xfs	defaults	0	0
UUID=7B77-95E7	/boot/efi	vfat	defaults,uid=0,gid=0,umask=077,shortname=winnt	0	2
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
# system_role:storage
/dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0
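
The "# system_role:storage" comment is the fingerprint stamped into /etc/fstab by the "Add fingerprint to /etc/fstab if present" task earlier in the run; the line after it is the single entry the role manages. A quick, illustrative way to list role-managed entries:

    - name: Show fstab lines managed by the storage role
      ansible.builtin.command: grep -A1 '^# system_role:storage' /etc/fstab
      register: __managed_fstab
      changed_when: false
      failed_when: __managed_fstab.rc not in [0, 1]   # rc 1 just means no match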

TASK [Read the /etc/crypttab file] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:25
Monday 21 October 2024  18:22:12 -0400 (0:00:00.383)       0:01:06.492 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:01.004061",
    "end": "2024-10-21 18:22:13.764067",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-10-21 18:22:12.760006"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:34
Monday 21 October 2024  18:22:13 -0400 (0:00:01.422)       0:01:07.915 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={
    'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0,
    'encryption_luks_version': 'luks2', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None,
    'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None,
    'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False,
    'state': 'present', 'type': 'lvm',
    'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None,
        'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs',
        'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None,
        'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [],
        'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None,
        'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True,
        'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None,
        'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1',
        '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:5
Monday 21 October 2024  18:22:13 -0400 (0:00:00.074)       0:01:07.990 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:18
Monday 21 October 2024  18:22:13 -0400 (0:00:00.034)       0:01:08.024 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.028793",
    "end": "2024-10-21 18:22:14.325736",
    "rc": 0,
    "start": "2024-10-21 18:22:14.296943"
}

STDOUT:

        0
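
vgs --binary prints the shared attribute as 0 or 1, so the padded "0" above means VG foo is not an lvmlockd shared VG, matching the pool spec's shared: false. The assertion that follows presumably checks exactly that; a sketch of the pair:

    - name: Get VG shared value
      ansible.builtin.command: vgs --noheadings --binary -o shared foo
      register: __vg_shared
      changed_when: false

    - name: Assert the VG is not shared
      ansible.builtin.assert:
        that:
          - __vg_shared.stdout | trim == '0'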

TASK [Verify that VG shared value checks out] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:24
Monday 21 October 2024  18:22:14 -0400 (0:00:00.450)       0:01:08.475 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:34
Monday 21 October 2024  18:22:14 -0400 (0:00:00.056)       0:01:08.531 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:2
Monday 21 October 2024  18:22:14 -0400 (0:00:00.065)       0:01:08.597 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:8
Monday 21 October 2024  18:22:14 -0400 (0:00:00.066)       0:01:08.663 ******** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:17
Monday 21 October 2024  18:22:15 -0400 (0:00:00.407)       0:01:09.071 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:22
Monday 21 October 2024  18:22:15 -0400 (0:00:00.066)       0:01:09.137 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:27
Monday 21 October 2024  18:22:15 -0400 (0:00:00.057)       0:01:09.194 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:36
Monday 21 October 2024  18:22:15 -0400 (0:00:00.066)       0:01:09.261 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:41
Monday 21 October 2024  18:22:15 -0400 (0:00:00.029)       0:01:09.291 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:46
Monday 21 October 2024  18:22:15 -0400 (0:00:00.040)       0:01:09.331 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:51
Monday 21 October 2024  18:22:15 -0400 (0:00:00.022)       0:01:09.354 ******** 
ok: [managed-node2] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:64
Monday 21 October 2024  18:22:15 -0400 (0:00:00.032)       0:01:09.386 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:176464): WARNING **: 18:22:15.585: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

Shared connection to 10.31.8.235 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:73
Monday 21 October 2024  18:22:15 -0400 (0:00:00.431)       0:01:09.818 ******** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:83
Monday 21 October 2024  18:22:15 -0400 (0:00:00.084)       0:01:09.903 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:8
Monday 21 October 2024  18:22:15 -0400 (0:00:00.098)       0:01:10.002 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:14
Monday 21 October 2024  18:22:15 -0400 (0:00:00.041)       0:01:10.043 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:19
Monday 21 October 2024  18:22:16 -0400 (0:00:00.084)       0:01:10.128 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:24
Monday 21 October 2024  18:22:16 -0400 (0:00:00.054)       0:01:10.183 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:29
Monday 21 October 2024  18:22:16 -0400 (0:00:00.057)       0:01:10.240 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:37
Monday 21 October 2024  18:22:16 -0400 (0:00:00.071)       0:01:10.312 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:46
Monday 21 October 2024  18:22:16 -0400 (0:00:00.052)       0:01:10.364 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:55
Monday 21 October 2024  18:22:16 -0400 (0:00:00.064)       0:01:10.429 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:64
Monday 21 October 2024  18:22:16 -0400 (0:00:00.052)       0:01:10.482 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:74
Monday 21 October 2024  18:22:16 -0400 (0:00:00.050)       0:01:10.533 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:83
Monday 21 October 2024  18:22:16 -0400 (0:00:00.074)       0:01:10.607 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:86
Monday 21 October 2024  18:22:16 -0400 (0:00:00.058)       0:01:10.666 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Monday 21 October 2024  18:22:16 -0400 (0:00:00.085)       0:01:10.752 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about the LV] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Monday 21 October 2024  18:22:16 -0400 (0:00:00.078)       0:01:10.830 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Monday 21 October 2024  18:22:16 -0400 (0:00:00.051)       0:01:10.882 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Monday 21 October 2024  18:22:16 -0400 (0:00:00.048)       0:01:10.930 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Monday 21 October 2024  18:22:16 -0400 (0:00:00.053)       0:01:10.984 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Monday 21 October 2024  18:22:16 -0400 (0:00:00.049)       0:01:11.033 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Monday 21 October 2024  18:22:17 -0400 (0:00:00.049)       0:01:11.083 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Monday 21 October 2024  18:22:17 -0400 (0:00:00.050)       0:01:11.133 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:89
Monday 21 October 2024  18:22:17 -0400 (0:00:00.054)       0:01:11.188 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-thin.yml:2
Monday 21 October 2024  18:22:17 -0400 (0:00:00.134)       0:01:11.323 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about thinpool] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:8
Monday 21 October 2024  18:22:17 -0400 (0:00:00.119)       0:01:11.443 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:16
Monday 21 October 2024  18:22:17 -0400 (0:00:00.053)       0:01:11.496 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:22
Monday 21 October 2024  18:22:17 -0400 (0:00:00.038)       0:01:11.534 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:26
Monday 21 October 2024  18:22:17 -0400 (0:00:00.039)       0:01:11.574 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:92
Monday 21 October 2024  18:22:17 -0400 (0:00:00.042)       0:01:11.616 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Monday 21 October 2024  18:22:17 -0400 (0:00:00.097)       0:01:11.714 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Monday 21 October 2024  18:22:17 -0400 (0:00:00.087)       0:01:11.801 ******** 
skipping: [managed-node2] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Validate pool member crypttab entries] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Monday 21 October 2024  18:22:17 -0400 (0:00:00.049)       0:01:11.850 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 => (item=/dev/sda)

TASK [Set variables used by tests] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Monday 21 October 2024  18:22:17 -0400 (0:00:00.082)       0:01:11.933 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Monday 21 October 2024  18:22:17 -0400 (0:00:00.082)       0:01:12.015 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Monday 21 October 2024  18:22:18 -0400 (0:00:00.096)       0:01:12.111 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Monday 21 October 2024  18:22:18 -0400 (0:00:00.057)       0:01:12.169 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Monday 21 October 2024  18:22:18 -0400 (0:00:00.061)       0:01:12.231 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Monday 21 October 2024  18:22:18 -0400 (0:00:00.064)       0:01:12.295 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Monday 21 October 2024  18:22:18 -0400 (0:00:00.074)       0:01:12.370 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:95
Monday 21 October 2024  18:22:18 -0400 (0:00:00.063)       0:01:12.434 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Monday 21 October 2024  18:22:18 -0400 (0:00:00.198)       0:01:12.633 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about VDO deduplication] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Monday 21 October 2024  18:22:18 -0400 (0:00:00.054)       0:01:12.687 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Monday 21 October 2024  18:22:18 -0400 (0:00:00.044)       0:01:12.732 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Monday 21 October 2024  18:22:18 -0400 (0:00:00.043)       0:01:12.775 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Monday 21 October 2024  18:22:18 -0400 (0:00:00.041)       0:01:12.817 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Monday 21 October 2024  18:22:18 -0400 (0:00:00.039)       0:01:12.856 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Monday 21 October 2024  18:22:18 -0400 (0:00:00.043)       0:01:12.900 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Monday 21 October 2024  18:22:18 -0400 (0:00:00.036)       0:01:12.936 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:98
Monday 21 October 2024  18:22:18 -0400 (0:00:00.037)       0:01:12.973 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:6
Monday 21 October 2024  18:22:18 -0400 (0:00:00.066)       0:01:13.040 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:11
Monday 21 October 2024  18:22:18 -0400 (0:00:00.024)       0:01:13.064 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pools were created] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:15
Monday 21 October 2024  18:22:19 -0400 (0:00:00.023)       0:01:13.088 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:25
Monday 21 October 2024  18:22:19 -0400 (0:00:00.022)       0:01:13.111 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:34
Monday 21 October 2024  18:22:19 -0400 (0:00:00.024)       0:01:13.136 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:44
Monday 21 October 2024  18:22:19 -0400 (0:00:00.024)       0:01:13.160 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:101
Monday 21 October 2024  18:22:19 -0400 (0:00:00.029)       0:01:13.189 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Monday 21 October 2024  18:22:19 -0400 (0:00:00.034)       0:01:13.224 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Set storage volume test variables] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:2
Monday 21 October 2024  18:22:19 -0400 (0:00:00.070)       0:01:13.295 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:19
Monday 21 October 2024  18:22:19 -0400 (0:00:00.072)       0:01:13.368 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:7
Monday 21 October 2024  18:22:19 -0400 (0:00:00.228)       0:01:13.596 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:11
Monday 21 October 2024  18:22:19 -0400 (0:00:00.100)       0:01:13.696 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:19
Monday 21 October 2024  18:22:19 -0400 (0:00:00.179)       0:01:13.875 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:28
Monday 21 October 2024  18:22:19 -0400 (0:00:00.086)       0:01:13.962 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:36
Monday 21 October 2024  18:22:19 -0400 (0:00:00.089)       0:01:14.051 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:42
Monday 21 October 2024  18:22:20 -0400 (0:00:00.079)       0:01:14.131 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:48
Monday 21 October 2024  18:22:20 -0400 (0:00:00.063)       0:01:14.195 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:57
Monday 21 October 2024  18:22:20 -0400 (0:00:00.046)       0:01:14.242 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:63
Monday 21 October 2024  18:22:20 -0400 (0:00:00.042)       0:01:14.284 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:69
Monday 21 October 2024  18:22:20 -0400 (0:00:00.046)       0:01:14.330 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:79
Monday 21 October 2024  18:22:20 -0400 (0:00:00.042)       0:01:14.373 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Monday 21 October 2024  18:22:20 -0400 (0:00:00.036)       0:01:14.409 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "/dev/mapper/foo-test1 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Monday 21 October 2024  18:22:20 -0400 (0:00:00.077)       0:01:14.487 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Monday 21 October 2024  18:22:20 -0400 (0:00:00.059)       0:01:14.546 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Monday 21 October 2024  18:22:20 -0400 (0:00:00.129)       0:01:14.675 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Monday 21 October 2024  18:22:20 -0400 (0:00:00.046)       0:01:14.722 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Monday 21 October 2024  18:22:20 -0400 (0:00:00.039)       0:01:14.762 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:6
Monday 21 October 2024  18:22:20 -0400 (0:00:00.033)       0:01:14.795 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:14
Monday 21 October 2024  18:22:20 -0400 (0:00:00.055)       0:01:14.851 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:3
Monday 21 October 2024  18:22:20 -0400 (0:00:00.046)       0:01:14.898 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549330.3390417,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1729549325.50902,
        "dev": 5,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 8033,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1729549325.50902,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:9
Monday 21 October 2024  18:22:21 -0400 (0:00:00.380)       0:01:15.278 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:16
Monday 21 October 2024  18:22:21 -0400 (0:00:00.051)       0:01:15.330 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:23
Monday 21 October 2024  18:22:21 -0400 (0:00:00.035)       0:01:15.365 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:29
Monday 21 October 2024  18:22:21 -0400 (0:00:00.040)       0:01:15.405 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:33
Monday 21 October 2024  18:22:21 -0400 (0:00:00.038)       0:01:15.444 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:38
Monday 21 October 2024  18:22:21 -0400 (0:00:00.028)       0:01:15.473 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Monday 21 October 2024  18:22:21 -0400 (0:00:00.037)       0:01:15.510 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Monday 21 October 2024  18:22:21 -0400 (0:00:00.032)       0:01:15.542 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Monday 21 October 2024  18:22:22 -0400 (0:00:00.811)       0:01:16.354 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Monday 21 October 2024  18:22:22 -0400 (0:00:00.035)       0:01:16.390 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Monday 21 October 2024  18:22:22 -0400 (0:00:00.044)       0:01:16.434 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Monday 21 October 2024  18:22:22 -0400 (0:00:00.085)       0:01:16.520 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Monday 21 October 2024  18:22:22 -0400 (0:00:00.058)       0:01:16.579 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Monday 21 October 2024  18:22:22 -0400 (0:00:00.049)       0:01:16.628 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Monday 21 October 2024  18:22:22 -0400 (0:00:00.049)       0:01:16.678 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Monday 21 October 2024  18:22:22 -0400 (0:00:00.065)       0:01:16.743 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Monday 21 October 2024  18:22:22 -0400 (0:00:00.055)       0:01:16.799 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Monday 21 October 2024  18:22:22 -0400 (0:00:00.074)       0:01:16.873 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Monday 21 October 2024  18:22:22 -0400 (0:00:00.077)       0:01:16.950 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Monday 21 October 2024  18:22:22 -0400 (0:00:00.050)       0:01:17.001 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Monday 21 October 2024  18:22:22 -0400 (0:00:00.068)       0:01:17.070 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Monday 21 October 2024  18:22:23 -0400 (0:00:00.064)       0:01:17.135 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:8
Monday 21 October 2024  18:22:23 -0400 (0:00:00.043)       0:01:17.178 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:14
Monday 21 October 2024  18:22:23 -0400 (0:00:00.044)       0:01:17.223 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:19
Monday 21 October 2024  18:22:23 -0400 (0:00:00.037)       0:01:17.260 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:24
Monday 21 October 2024  18:22:23 -0400 (0:00:00.037)       0:01:17.298 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:29
Monday 21 October 2024  18:22:23 -0400 (0:00:00.040)       0:01:17.338 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:37
Monday 21 October 2024  18:22:23 -0400 (0:00:00.038)       0:01:17.377 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:46
Monday 21 October 2024  18:22:23 -0400 (0:00:00.027)       0:01:17.404 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:54
Monday 21 October 2024  18:22:23 -0400 (0:00:00.025)       0:01:17.429 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:62
Monday 21 October 2024  18:22:23 -0400 (0:00:00.031)       0:01:17.461 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:70
Monday 21 October 2024  18:22:23 -0400 (0:00:00.027)       0:01:17.488 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:3
Monday 21 October 2024  18:22:23 -0400 (0:00:00.031)       0:01:17.520 ******** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:11
Monday 21 October 2024  18:22:23 -0400 (0:00:00.393)       0:01:17.913 ******** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Establish base value for expected size] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:20
Monday 21 October 2024  18:22:24 -0400 (0:00:00.488)       0:01:18.401 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:28
Monday 21 October 2024  18:22:24 -0400 (0:00:00.046)       0:01:18.448 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:32
Monday 21 October 2024  18:22:24 -0400 (0:00:00.028)       0:01:18.476 ******** 
ok: [managed-node2] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:46
Monday 21 October 2024  18:22:24 -0400 (0:00:00.436)       0:01:18.913 ******** 
skipping: [managed-node2] => {}

TASK [Show test blockinfo] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:50
Monday 21 October 2024  18:22:24 -0400 (0:00:00.074)       0:01:18.988 ******** 
skipping: [managed-node2] => {}

TASK [Show test pool size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:54
Monday 21 October 2024  18:22:24 -0400 (0:00:00.072)       0:01:19.061 ******** 
skipping: [managed-node2] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:58
Monday 21 October 2024  18:22:25 -0400 (0:00:00.073)       0:01:19.134 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:67
Monday 21 October 2024  18:22:25 -0400 (0:00:00.075)       0:01:19.210 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:71
Monday 21 October 2024  18:22:25 -0400 (0:00:00.039)       0:01:19.250 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:76
Monday 21 October 2024  18:22:25 -0400 (0:00:00.045)       0:01:19.296 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:82
Monday 21 October 2024  18:22:25 -0400 (0:00:00.043)       0:01:19.339 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:86
Monday 21 October 2024  18:22:25 -0400 (0:00:00.046)       0:01:19.386 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:91
Monday 21 October 2024  18:22:25 -0400 (0:00:00.045)       0:01:19.431 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:96
Monday 21 October 2024  18:22:25 -0400 (0:00:00.045)       0:01:19.477 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:101
Monday 21 October 2024  18:22:25 -0400 (0:00:00.045)       0:01:19.522 ******** 
skipping: [managed-node2] => {}

TASK [Show volume thin pool size] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:105
Monday 21 October 2024  18:22:25 -0400 (0:00:00.048)       0:01:19.570 ******** 
skipping: [managed-node2] => {}

TASK [Show test volume size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:109
Monday 21 October 2024  18:22:25 -0400 (0:00:00.045)       0:01:19.616 ******** 
skipping: [managed-node2] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:113
Monday 21 October 2024  18:22:25 -0400 (0:00:00.041)       0:01:19.658 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:120
Monday 21 October 2024  18:22:25 -0400 (0:00:00.045)       0:01:19.703 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:127
Monday 21 October 2024  18:22:25 -0400 (0:00:00.046)       0:01:19.750 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:131
Monday 21 October 2024  18:22:25 -0400 (0:00:00.071)       0:01:19.822 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:137
Monday 21 October 2024  18:22:25 -0400 (0:00:00.091)       0:01:19.913 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:143
Monday 21 October 2024  18:22:25 -0400 (0:00:00.075)       0:01:19.988 ******** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:147
Monday 21 October 2024  18:22:26 -0400 (0:00:00.093)       0:01:20.081 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:151
Monday 21 October 2024  18:22:26 -0400 (0:00:00.094)       0:01:20.176 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
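
The size tasks above parse both the requested "4g" spec and the device's actual size into bytes before comparing them: 4 GiB = 4 x 1024^3 = 4294967296 bytes, so the assertion holds. A minimal sketch of that comparison, with illustrative task and variable names rather than the test's verbatim source:

    - name: Assert expected size is actual size
      ansible.builtin.assert:
        that:
          - (storage_test_expected_size | int) == storage_test_actual_size.bytes
        fail_msg: >-
          expected {{ storage_test_expected_size }} bytes,
          got {{ storage_test_actual_size.bytes }} bytes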

TASK [Get information about the LV] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:5
Monday 21 October 2024  18:22:26 -0400 (0:00:00.120)       0:01:20.297 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.028203",
    "end": "2024-10-21 18:22:26.655673",
    "rc": 0,
    "start": "2024-10-21 18:22:26.627470"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
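
The --nameprefixes/--noheadings/--unquoted flags make lvs emit each field as a bare KEY=VALUE pair, so output like the line above can be consumed without a dedicated parser. A minimal sketch of the same query plus extraction of the segment type; the regex and the registered variable name are illustrative, not the test's exact source:

    - name: Query the LV's cache and segment attributes
      ansible.builtin.command:
        cmd: >-
          lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted
          -o name,attr,cache_total_blocks,chunk_size,segtype foo/test1
      register: lvs_out
      changed_when: false

    - name: Extract the segment type (yields a list such as ["linear"])
      ansible.builtin.set_fact:
        storage_test_lv_segtype: "{{ lvs_out.stdout | regex_search('LVM2_SEGTYPE=(\\S+)', '\\1') }}"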

TASK [Set LV segment type] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:13
Monday 21 October 2024  18:22:26 -0400 (0:00:00.505)       0:01:20.803 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:17
Monday 21 October 2024  18:22:26 -0400 (0:00:00.075)       0:01:20.879 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:24
Monday 21 October 2024  18:22:26 -0400 (0:00:00.081)       0:01:20.960 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:31
Monday 21 October 2024  18:22:26 -0400 (0:00:00.071)       0:01:21.032 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:37
Monday 21 October 2024  18:22:27 -0400 (0:00:00.069)       0:01:21.101 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:42
Monday 21 October 2024  18:22:27 -0400 (0:00:00.066)       0:01:21.168 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:25
Monday 21 October 2024  18:22:27 -0400 (0:00:00.072)       0:01:21.241 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:44
Monday 21 October 2024  18:22:27 -0400 (0:00:00.049)       0:01:21.290 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:54
Monday 21 October 2024  18:22:27 -0400 (0:00:00.040)       0:01:21.331 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Create a file] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/create-test-file.yml:12
Monday 21 October 2024  18:22:27 -0400 (0:00:00.045)       0:01:21.376 ******** 
changed: [managed-node2] => {
    "changed": true,
    "dest": "/opt/test1/quux",
    "gid": 0,
    "group": "root",
    "mode": "0644",
    "owner": "root",
    "secontext": "unconfined_u:object_r:unlabeled_t:s0",
    "size": 0,
    "state": "file",
    "uid": 0
}

TASK [Test for correct handling of safe_mode] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:187
Monday 21 October 2024  18:22:27 -0400 (0:00:00.525)       0:01:21.902 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml for managed-node2

TASK [Store global variable value copy] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:4
Monday 21 October 2024  18:22:27 -0400 (0:00:00.110)       0:01:22.012 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_pools_global": [],
        "storage_safe_mode_global": true,
        "storage_volumes_global": []
    },
    "changed": false
}

TASK [Verify role raises correct error] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:10
Monday 21 October 2024  18:22:28 -0400 (0:00:00.090)       0:01:22.102 ******** 

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:2
Monday 21 October 2024  18:22:28 -0400 (0:00:00.068)       0:01:22.171 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Ensure ansible_facts used by role] ****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 21 October 2024  18:22:28 -0400 (0:00:00.069)       0:01:22.240 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 21 October 2024  18:22:28 -0400 (0:00:00.076)       0:01:22.317 ******** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=RedHat_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/vars/RedHat_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.yml"
}
skipping: [managed-node2] => (item=RedHat_9.5.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.5.yml",
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if system is ostree] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 21 October 2024  18:22:28 -0400 (0:00:00.183)       0:01:22.500 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 21 October 2024  18:22:28 -0400 (0:00:00.081)       0:01:22.582 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:5
Monday 21 October 2024  18:22:28 -0400 (0:00:00.058)       0:01:22.640 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:9
Monday 21 October 2024  18:22:28 -0400 (0:00:00.055)       0:01:22.695 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Include the appropriate provider tasks] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:13
Monday 21 October 2024  18:22:28 -0400 (0:00:00.089)       0:01:22.784 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Make sure blivet is available] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 21 October 2024  18:22:28 -0400 (0:00:00.135)       0:01:22.920 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Show storage_pools] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 21 October 2024  18:22:28 -0400 (0:00:00.098)       0:01:23.018 ******** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": "aes-xts-plain64",
            "encryption_key_size": 512,
            "encryption_luks_version": "luks1",
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
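
This pool spec asks the role to convert the existing unencrypted pool 'foo' to a LUKS-backed one. As a playbook invocation it would look roughly like the sketch below; the play header and host placement are assumptions, only the storage_pools value comes from the log above:

    - hosts: managed-node2
      roles:
        - redhat.rhel_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks: [sda]
            encryption: true
            encryption_luks_version: luks1
            encryption_cipher: aes-xts-plain64
            encryption_key_size: 512
            encryption_password: yabbadabbadoo
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1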

TASK [redhat.rhel_system_roles.storage : Show storage_volumes] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 21 October 2024  18:22:29 -0400 (0:00:00.150)       0:01:23.169 ******** 
ok: [managed-node2] => {
    "storage_volumes": []
}

TASK [redhat.rhel_system_roles.storage : Get required packages] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 21 October 2024  18:22:29 -0400 (0:00:00.123)       0:01:23.292 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Enable copr repositories if needed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 21 October 2024  18:22:29 -0400 (0:00:00.078)       0:01:23.371 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Make sure required packages are installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 21 October 2024  18:22:29 -0400 (0:00:00.061)       0:01:23.432 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Get service facts] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 21 October 2024  18:22:29 -0400 (0:00:00.052)       0:01:23.484 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set storage_cryptsetup_services] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 21 October 2024  18:22:29 -0400 (0:00:00.064)       0:01:23.549 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 21 October 2024  18:22:29 -0400 (0:00:00.089)       0:01:23.638 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 21 October 2024  18:22:29 -0400 (0:00:00.026)       0:01:23.665 ******** 
fatal: [managed-node2]: FAILED! => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

MSG:

cannot remove and recreate existing pool 'foo' in safe mode
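
This failure is the point of the test: storage_safe_mode defaults to true, and in safe mode the blivet module refuses any change that would destroy existing formatting, which converting 'foo' to LUKS requires. A sketch of the documented opt-out; my_pool_spec is a hypothetical variable standing in for the pool definition shown earlier:

    - name: Apply the pool spec with safe mode disabled
      ansible.builtin.include_role:
        name: redhat.rhel_system_roles.storage
      vars:
        storage_safe_mode: false              # default is true; true produced the failure above
        storage_pools: "{{ my_pool_spec }}"   # hypothetical: the pool definition shown earlier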

TASK [redhat.rhel_system_roles.storage : Failed message] ***********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:109
Monday 21 October 2024  18:22:30 -0400 (0:00:01.390)       0:01:25.056 ******** 
fatal: [managed-node2]: FAILED! => {
    "changed": false
}

MSG:

{'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True,
 'msg': "cannot remove and recreate existing pool 'foo' in safe mode",
 'invocation': {'module_args': {
     'pools': [{'disks': ['sda'], 'encryption': True, 'encryption_cipher': 'aes-xts-plain64', 'encryption_key': None, 'encryption_key_size': 512, 'encryption_luks_version': 'luks1', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm',
                'volumes': [{'encryption': None, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': None, 'fs_label': None, 'fs_type': None, 'mount_options': None, 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': None, 'cached': None, 'cache_devices': [], 'cache_mode': None, 'cache_size': None, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None}]}],
     'volumes': [],
     'use_partitions': None,
     'disklabel_type': None,
     'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'grow_to_fill': False, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False},
     'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []},
     'safe_mode': True,
     'packages_only': False,
     'diskvolume_mkfs_option_map': {}}},
 '_ansible_no_log': False}
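
Note that the password in module_args reads VALUE_SPECIFIED_IN_NO_LOG_PARAMETER: the module declares encryption_password as a no_log parameter, so Ansible masks its value even though the surrounding result is printed in full. The same masking can be applied to whole tasks; a generic sketch, unrelated to this role's source, with a hypothetical variable:

    - name: Handle a secret without leaking it to logs
      ansible.builtin.debug:
        msg: "using {{ some_secret }}"   # some_secret is hypothetical
      no_log: true                       # censors this task's output entirely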

TASK [redhat.rhel_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 21 October 2024  18:22:31 -0400 (0:00:00.058)       0:01:25.114 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that we failed in the role] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:23
Monday 21 October 2024  18:22:31 -0400 (0:00:00.037)       0:01:25.152 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the blivet output and error message are correct] ******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:28
Monday 21 October 2024  18:22:31 -0400 (0:00:00.058)       0:01:25.211 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
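
verify-role-failed.yml can assert on the failure because it invokes the role inside a block/rescue: the expected fatal error is caught, and the result the role registered (blivet_output, shown verbatim later in this log) is inspected afterwards. A minimal sketch of that pattern, not the file's verbatim source:

    - name: Verify role raises correct error
      block:
        - name: Run the role, expecting it to fail
          ansible.builtin.include_role:
            name: redhat.rhel_system_roles.storage
        - name: Fail the test if the role succeeded
          ansible.builtin.fail:
            msg: role ran without the expected safe-mode error
      rescue:
        - name: Check the recorded error message
          ansible.builtin.assert:
            that:
              - blivet_output.failed | d(false)
              - blivet_output.msg is search('cannot remove and recreate existing pool')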

TASK [Verify correct exception or error message] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-failed.yml:39
Monday 21 October 2024  18:22:31 -0400 (0:00:00.067)       0:01:25.278 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Stat the file] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-data-preservation.yml:11
Monday 21 October 2024  18:22:31 -0400 (0:00:00.049)       0:01:25.328 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549347.7331197,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1729549347.7331197,
        "dev": 64768,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 131,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0644",
        "mtime": 1729549347.7331197,
        "nlink": 1,
        "path": "/opt/test1/quux",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "4078691840",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Assert file presence] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-data-preservation.yml:16
Monday 21 October 2024  18:22:31 -0400 (0:00:00.481)       0:01:25.809 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
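
The preservation check passes because safe mode aborted before touching the device: /opt/test1/quux still exists, and its checksum da39a3ee5e6b4b0d3255bfef95601890afd80709 is the SHA-1 of zero bytes, consistent with its recorded size of 0. A sketch of the stat-and-assert pair, with an illustrative register name:

    - name: Stat the file
      ansible.builtin.stat:
        path: /opt/test1/quux
      register: quux_stat

    - name: Assert file presence
      ansible.builtin.assert:
        that:
          - quux_stat.stat.exists
        fail_msg: data was not preserved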

TASK [Add encryption to the pool] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:212
Monday 21 October 2024  18:22:31 -0400 (0:00:00.081)       0:01:25.892 ******** 

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:2
Monday 21 October 2024  18:22:31 -0400 (0:00:00.125)       0:01:26.017 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Ensure ansible_facts used by role] ****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 21 October 2024  18:22:32 -0400 (0:00:00.078)       0:01:26.096 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 21 October 2024  18:22:32 -0400 (0:00:00.055)       0:01:26.152 ******** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=RedHat_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/vars/RedHat_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.yml"
}
skipping: [managed-node2] => (item=RedHat_9.5.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.5.yml",
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if system is ostree] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 21 October 2024  18:22:32 -0400 (0:00:00.071)       0:01:26.224 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 21 October 2024  18:22:32 -0400 (0:00:00.073)       0:01:26.297 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:5
Monday 21 October 2024  18:22:32 -0400 (0:00:00.052)       0:01:26.350 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:9
Monday 21 October 2024  18:22:32 -0400 (0:00:00.038)       0:01:26.388 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Include the appropriate provider tasks] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:13
Monday 21 October 2024  18:22:32 -0400 (0:00:00.029)       0:01:26.418 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Make sure blivet is available] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 21 October 2024  18:22:32 -0400 (0:00:00.064)       0:01:26.483 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Show storage_pools] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 21 October 2024  18:22:32 -0400 (0:00:00.037)       0:01:26.521 ******** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": "aes-xts-plain64",
            "encryption_key_size": 512,
            "encryption_luks_version": "luks1",
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}

TASK [redhat.rhel_system_roles.storage : Show storage_volumes] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 21 October 2024  18:22:32 -0400 (0:00:00.048)       0:01:26.569 ******** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined"
}
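
Unlike the earlier invocation, this play defines only storage_pools, so debugging storage_volumes prints Ansible's undefined-variable notice rather than a value (debug reports the problem instead of failing the task). A default filter would render an empty list instead; an illustrative variant, not the role's actual task:

    - name: Show storage_volumes
      ansible.builtin.debug:
        var: storage_volumes | default([])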

TASK [redhat.rhel_system_roles.storage : Get required packages] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 21 October 2024  18:22:32 -0400 (0:00:00.026)       0:01:26.596 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Enable copr repositories if needed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 21 October 2024  18:22:32 -0400 (0:00:00.029)       0:01:26.626 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Make sure required packages are installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 21 October 2024  18:22:32 -0400 (0:00:00.029)       0:01:26.656 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Get service facts] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 21 October 2024  18:22:32 -0400 (0:00:00.029)       0:01:26.685 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set storage_cryptsetup_services] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 21 October 2024  18:22:32 -0400 (0:00:00.032)       0:01:26.717 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 21 October 2024  18:22:32 -0400 (0:00:00.081)       0:01:26.799 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 21 October 2024  18:22:32 -0400 (0:00:00.039)       0:01:26.838 ******** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "luks"
        },
        {
            "action": "create device",
            "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [
        {
            "backing_device": "/dev/sda",
            "name": "luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "password": "-",
            "state": "present"
        }
    ],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/xvda3",
        "/dev/xvda4",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "mounted"
        }
    ],
    "packages": [
        "dosfstools",
        "lvm2",
        "xfsprogs",
        "cryptsetup"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": "aes-xts-plain64",
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 512,
            "encryption_luks_version": "luks1",
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
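
Read top to bottom, the actions list is the whole conversion: tear down the xfs filesystem, the test1 LV, the foo VG, and the PV formatting on sda; create a LUKS1 container directly on sda; then rebuild PV, VG, LV, and filesystem on the opened /dev/mapper/luks-... device. The crypts entry feeds the role's /etc/crypttab handling. A sketch of checking the result by hand; cryptsetup luksDump is standard usage, the task wrapper and assertions are illustrative:

    - name: Dump the new LUKS header on sda
      ansible.builtin.command:
        cmd: cryptsetup luksDump /dev/sda
      register: luks_dump
      changed_when: false

    - name: Assert LUKS version and cipher mode match the pool spec
      ansible.builtin.assert:
        that:
          - luks_dump.stdout is search('Version:\s*1')
          - luks_dump.stdout is search('xts-plain64')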

TASK [redhat.rhel_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 21 October 2024  18:22:41 -0400 (0:00:08.601)       0:01:35.440 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if /etc/fstab is present] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 21 October 2024  18:22:41 -0400 (0:00:00.036)       0:01:35.476 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549328.616034,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "0d495c4508d8f74dc9d1b472b6e1b1d638f6ad31",
        "ctime": 1729549328.6150339,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 822083726,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1729549328.6150339,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1173,
        "uid": 0,
        "version": "1383220658",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [redhat.rhel_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 21 October 2024  18:22:41 -0400 (0:00:00.374)       0:01:35.851 ******** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}
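
Here backup is empty and changed is false because the role's fingerprint comment is already present in /etc/fstab from the earlier run; the stamp lets subsequent runs recognize the file as role-managed. A sketch of the idea, where the exact comment text follows the system roles' usual "# system_role:<name>" convention and is an assumption here:

    - name: Add fingerprint to /etc/fstab if present
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"   # assumed fingerprint text
        insertbefore: BOF
        backup: true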

TASK [redhat.rhel_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 21 October 2024  18:22:42 -0400 (0:00:00.411)       0:01:36.263 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Show blivet_output] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 21 October 2024  18:22:42 -0400 (0:00:00.020)       0:01:36.283 ******** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "luks"
            },
            {
                "action": "create device",
                "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [
            {
                "backing_device": "/dev/sda",
                "name": "luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "password": "-",
                "state": "present"
            }
        ],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/xvda3",
            "/dev/xvda4",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "mounted"
            }
        ],
        "packages": [
            "dosfstools",
            "lvm2",
            "xfsprogs",
            "cryptsetup"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": "aes-xts-plain64",
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 512,
                "encryption_luks_version": "luks1",
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

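The pools structure above mirrors the role's public interface almost one-to-one: every key is a storage_pools option, and the underscore-prefixed fields (_device, _mount_id, and so on) are computed by blivet. For orientation, a minimal sketch of a role invocation that would yield this state, assuming the passphrase arrives via a hypothetical luks_password variable:

    - name: Create an encrypted LVM pool with one XFS volume
      ansible.builtin.include_role:
        name: redhat.rhel_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks: [sda]
            encryption: true
            encryption_luks_version: luks1
            encryption_cipher: aes-xts-plain64
            encryption_key_size: 512
            encryption_password: "{{ luks_password }}"  # hypothetical variable
            volumes:
              - name: test1
                size: 4g
                fs_type: xfs
                mount_point: /opt/test1
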
TASK [redhat.rhel_system_roles.storage : Set the list of pools for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 21 October 2024  18:22:42 -0400 (0:00:00.030)       0:01:36.314 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": "aes-xts-plain64",
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 512,
                "encryption_luks_version": "luks1",
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Set the list of volumes for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 21 October 2024  18:22:42 -0400 (0:00:00.028)       0:01:36.342 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Remove obsolete mounts] ***************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 21 October 2024  18:22:42 -0400 (0:00:00.027)       0:01:36.369 ******** 
changed: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 21 October 2024  18:22:42 -0400 (0:00:00.509)       0:01:36.879 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [redhat.rhel_system_roles.storage : Set up new/current mounts] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 21 October 2024  18:22:43 -0400 (0:00:00.746)       0:01:37.626 ******** 
changed: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}

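Taken together with the "Remove obsolete mounts" task above, this is the role's fstab handling: stale entries are removed with state: absent, then the current set is written with state: mounted. Each item in the mounts list reduces to an ansible.posix.mount call; a standalone sketch using the same device and mount point:

    - name: Mount the LV and persist it in /etc/fstab
      ansible.posix.mount:
        src: /dev/mapper/foo-test1
        path: /opt/test1
        fstype: xfs
        opts: defaults
        state: mounted
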
TASK [redhat.rhel_system_roles.storage : Manage mount ownership/permissions] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 21 October 2024  18:22:44 -0400 (0:00:00.500)       0:01:38.127 ******** 
skipping: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 21 October 2024  18:22:44 -0400 (0:00:00.088)       0:01:38.216 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [redhat.rhel_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 21 October 2024  18:22:44 -0400 (0:00:00.745)       0:01:38.962 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549332.7620525,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1729549330.3200417,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 452984979,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1729549330.3190417,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "2008719639",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [redhat.rhel_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 21 October 2024  18:22:45 -0400 (0:00:00.390)       0:01:39.352 ******** 
changed: [managed-node2] => (item={'backing_device': '/dev/sda', 'name': 'luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7', 'password': '-', 'state': 'present'}) => {
    "ansible_loop_var": "entry",
    "backup": "",
    "changed": true,
    "entry": {
        "backing_device": "/dev/sda",
        "name": "luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
        "password": "-",
        "state": "present"
    }
}

MSG:

line added

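The entry fields map directly onto /etc/crypttab columns: mapper name, backing device, and key file, where "-" means no key file is stored and the passphrase must be supplied at unlock time. A standalone sketch of the same change, assuming the community.general collection is available:

    - name: Add the LUKS mapping to /etc/crypttab
      community.general.crypttab:
        name: luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7
        backing_device: /dev/sda
        password: '-'
        state: present
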
TASK [redhat.rhel_system_roles.storage : Update facts] *************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 21 October 2024  18:22:45 -0400 (0:00:00.402)       0:01:39.755 ******** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:231
Monday 21 October 2024  18:22:46 -0400 (0:00:01.000)       0:01:40.755 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:2
Monday 21 October 2024  18:22:46 -0400 (0:00:00.097)       0:01:40.852 ******** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": "aes-xts-plain64",
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 512,
            "encryption_luks_version": "luks1",
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:7
Monday 21 October 2024  18:22:46 -0400 (0:00:00.077)       0:01:40.929 ******** 
skipping: [managed-node2] => {}

TASK [Collect info about the volumes.] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:15
Monday 21 October 2024  18:22:46 -0400 (0:00:00.062)       0:01:40.992 ******** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "6cc18fb1-5dbc-4cae-80cb-22fca6458d6d"
        },
        "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "size": "10G",
            "type": "crypt",
            "uuid": "nSBQvS-xreE-rXgK-O8WW-JK9I-ShAv-3U0VhC"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "vfat",
            "label": "",
            "mountpoint": "/boot/efi",
            "name": "/dev/xvda2",
            "size": "200M",
            "type": "partition",
            "uuid": "7B77-95E7"
        },
        "/dev/xvda3": {
            "fstype": "xfs",
            "label": "boot",
            "mountpoint": "/boot",
            "name": "/dev/xvda3",
            "size": "1G",
            "type": "partition",
            "uuid": "a8cc2a47-4cf2-4d6f-8916-f69641ec5919"
        },
        "/dev/xvda4": {
            "fstype": "xfs",
            "label": "root",
            "mountpoint": "/",
            "name": "/dev/xvda4",
            "size": "248.8G",
            "type": "partition",
            "uuid": "1b4086e3-4d44-4b6e-99dc-43b96b9fea96"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:20
Monday 21 October 2024  18:22:47 -0400 (0:00:00.479)       0:01:41.472 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002912",
    "end": "2024-10-21 18:22:47.758442",
    "rc": 0,
    "start": "2024-10-21 18:22:47.755530"
}

STDOUT:

UUID=1b4086e3-4d44-4b6e-99dc-43b96b9fea96	/	xfs	defaults	0	0
UUID=a8cc2a47-4cf2-4d6f-8916-f69641ec5919	/boot	xfs	defaults	0	0
UUID=7B77-95E7	/boot/efi	vfat	defaults,uid=0,gid=0,umask=077,shortname=winnt	0	2
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
# system_role:storage
/dev/mapper/foo-test1 /opt/test1 xfs defaults 0 0

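Note the "# system_role:storage" fingerprint: the role marks the fstab content it manages. The existence check this read feeds amounts to a substring/regex assertion; a sketch, assuming the cat output were registered as a hypothetical fstab_contents:

    - name: Assert the test volume appears in /etc/fstab
      ansible.builtin.assert:
        that:
          - fstab_contents.stdout is search('/dev/mapper/foo-test1 /opt/test1 xfs')
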
TASK [Read the /etc/crypttab file] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:25
Monday 21 October 2024  18:22:47 -0400 (0:00:00.433)       0:01:41.905 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002925",
    "end": "2024-10-21 18:22:48.210027",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-10-21 18:22:48.207102"
}

STDOUT:

luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7 /dev/sda -

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:34
Monday 21 October 2024  18:22:48 -0400 (0:00:00.436)       0:01:42.341 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': 'aes-xts-plain64', 'encryption_key': None, 'encryption_key_size': 512, 'encryption_luks_version': 'luks1', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:5
Monday 21 October 2024  18:22:48 -0400 (0:00:00.117)       0:01:42.459 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:18
Monday 21 October 2024  18:22:48 -0400 (0:00:00.040)       0:01:42.499 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.028695",
    "end": "2024-10-21 18:22:48.860663",
    "rc": 0,
    "start": "2024-10-21 18:22:48.831968"
}

STDOUT:

        0

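With --binary, vgs prints the shared attribute as 0 or 1, so the assertion that follows only needs to compare the trimmed stdout with the pool's shared flag. A sketch using hypothetical names (vg_shared for the registered command result, pool for the pool spec):

    - name: Assert the VG shared flag matches the pool spec
      ansible.builtin.assert:
        that:
          - vg_shared.stdout | trim == ('1' if pool.shared else '0')
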
TASK [Verify that VG shared value checks out] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:24
Monday 21 October 2024  18:22:48 -0400 (0:00:00.517)       0:01:43.016 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:34
Monday 21 October 2024  18:22:49 -0400 (0:00:00.057)       0:01:43.073 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:2
Monday 21 October 2024  18:22:49 -0400 (0:00:00.082)       0:01:43.155 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:8
Monday 21 October 2024  18:22:49 -0400 (0:00:00.066)       0:01:43.222 ******** 
ok: [managed-node2] => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
    "pv": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
}

TASK [Set pvs lvm length] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:17
Monday 21 October 2024  18:22:49 -0400 (0:00:00.374)       0:01:43.596 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:22
Monday 21 October 2024  18:22:49 -0400 (0:00:00.059)       0:01:43.656 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:27
Monday 21 October 2024  18:22:49 -0400 (0:00:00.056)       0:01:43.713 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:36
Monday 21 October 2024  18:22:49 -0400 (0:00:00.048)       0:01:43.761 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "crypt"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:41
Monday 21 October 2024  18:22:49 -0400 (0:00:00.030)       0:01:43.792 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:46
Monday 21 October 2024  18:22:49 -0400 (0:00:00.025)       0:01:43.817 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:51
Monday 21 October 2024  18:22:49 -0400 (0:00:00.024)       0:01:43.842 ******** 
ok: [managed-node2] => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
}

MSG:

All assertions passed

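This per-PV check compares each member's device type against _storage_test_expected_pv_type, which is "crypt" here because the PV sits on the LUKS mapping rather than on the raw disk. A sketch, assuming the block-device scan from "Collect info about the volumes." were registered as a hypothetical blk_info:

    - name: Assert each pool PV has the expected device type
      ansible.builtin.assert:
        that:
          - blk_info.info[pv].type == _storage_test_expected_pv_type
      loop: "{{ _storage_test_pool_pvs }}"
      loop_control:
        loop_var: pv
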
TASK [Check that blivet supports PV grow to fill] ******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:64
Monday 21 October 2024  18:22:49 -0400 (0:00:00.038)       0:01:43.881 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:179609): WARNING **: 18:22:50.096: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

Shared connection to 10.31.8.235 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:73
Monday 21 October 2024  18:22:50 -0400 (0:00:00.433)       0:01:44.314 ******** 
skipping: [managed-node2] => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:83
Monday 21 October 2024  18:22:50 -0400 (0:00:00.043)       0:01:44.357 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:8
Monday 21 October 2024  18:22:50 -0400 (0:00:00.048)       0:01:44.405 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:14
Monday 21 October 2024  18:22:50 -0400 (0:00:00.023)       0:01:44.429 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:19
Monday 21 October 2024  18:22:50 -0400 (0:00:00.022)       0:01:44.451 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:24
Monday 21 October 2024  18:22:50 -0400 (0:00:00.024)       0:01:44.476 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:29
Monday 21 October 2024  18:22:50 -0400 (0:00:00.023)       0:01:44.499 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:37
Monday 21 October 2024  18:22:50 -0400 (0:00:00.022)       0:01:44.522 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:46
Monday 21 October 2024  18:22:50 -0400 (0:00:00.022)       0:01:44.544 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:55
Monday 21 October 2024  18:22:50 -0400 (0:00:00.024)       0:01:44.568 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:64
Monday 21 October 2024  18:22:50 -0400 (0:00:00.023)       0:01:44.592 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:74
Monday 21 October 2024  18:22:50 -0400 (0:00:00.023)       0:01:44.615 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:83
Monday 21 October 2024  18:22:50 -0400 (0:00:00.022)       0:01:44.638 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:86
Monday 21 October 2024  18:22:50 -0400 (0:00:00.026)       0:01:44.664 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Monday 21 October 2024  18:22:50 -0400 (0:00:00.045)       0:01:44.710 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about the LV] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Monday 21 October 2024  18:22:50 -0400 (0:00:00.046)       0:01:44.757 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Monday 21 October 2024  18:22:50 -0400 (0:00:00.119)       0:01:44.877 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Monday 21 October 2024  18:22:50 -0400 (0:00:00.035)       0:01:44.913 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Monday 21 October 2024  18:22:50 -0400 (0:00:00.038)       0:01:44.952 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Monday 21 October 2024  18:22:50 -0400 (0:00:00.044)       0:01:44.996 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Monday 21 October 2024  18:22:50 -0400 (0:00:00.047)       0:01:45.044 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Monday 21 October 2024  18:22:51 -0400 (0:00:00.028)       0:01:45.072 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:89
Monday 21 October 2024  18:22:51 -0400 (0:00:00.029)       0:01:45.102 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-thin.yml:2
Monday 21 October 2024  18:22:51 -0400 (0:00:00.048)       0:01:45.150 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about thinpool] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:8
Monday 21 October 2024  18:22:51 -0400 (0:00:00.043)       0:01:45.193 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:16
Monday 21 October 2024  18:22:51 -0400 (0:00:00.027)       0:01:45.221 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:22
Monday 21 October 2024  18:22:51 -0400 (0:00:00.040)       0:01:45.261 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:26
Monday 21 October 2024  18:22:51 -0400 (0:00:00.046)       0:01:45.308 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:92
Monday 21 October 2024  18:22:51 -0400 (0:00:00.078)       0:01:45.386 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Monday 21 October 2024  18:22:51 -0400 (0:00:00.103)       0:01:45.490 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Monday 21 October 2024  18:22:51 -0400 (0:00:00.077)       0:01:45.567 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml for managed-node2 => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7)

TASK [Get the backing device path] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:2
Monday 21 October 2024  18:22:51 -0400 (0:00:00.070)       0:01:45.637 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/disk/by-uuid/a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
    ],
    "delta": "0:00:00.002981",
    "end": "2024-10-21 18:22:51.915748",
    "rc": 0,
    "start": "2024-10-21 18:22:51.912767"
}

STDOUT:

/dev/sda

TASK [Ensure cryptsetup is present] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:12
Monday 21 October 2024  18:22:51 -0400 (0:00:00.414)       0:01:46.051 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

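"Nothing to do" is the package manager's way of reporting that the requested set is already installed; the task itself is a plain package install, roughly:

    - name: Ensure cryptsetup is installed
      ansible.builtin.package:
        name: cryptsetup
        state: present
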
TASK [Collect LUKS info for this member] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:18
Monday 21 October 2024  18:22:52 -0400 (0:00:00.835)       0:01:46.887 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cryptsetup",
        "luksDump",
        "/dev/sda"
    ],
    "delta": "0:00:00.006173",
    "end": "2024-10-21 18:22:53.167822",
    "rc": 0,
    "start": "2024-10-21 18:22:53.161649"
}

STDOUT:

LUKS header information for /dev/sda

Version:       	1
Cipher name:   	aes
Cipher mode:   	xts-plain64
Hash spec:     	sha256
Payload offset:	16384
MK bits:       	512
MK digest:     	ce 2a 99 3b b9 20 2a 25 a1 d4 b0 b4 fd 92 ee ab a3 eb 49 c4 
MK salt:       	70 a3 2c 58 66 17 37 08 3d dd b9 79 1c 84 36 00 
               	7d 33 a9 40 7b 11 10 c7 0d 76 89 93 be 6f cf 5d 
MK iterations: 	106910
UUID:          	a1c15428-f81f-4ba5-b6d8-cb21453ee5a7

Key Slot 0: ENABLED
	Iterations:         	1710564
	Salt:               	1a 7e 96 12 d4 fc 6d 93 0e 80 f2 cf d0 6b 13 db 
	                      	95 dd ca 05 5e 24 90 09 ea fb b3 f9 45 33 0b 8c 
	Key material offset:	8
	AF stripes:            	4000
Key Slot 1: DISABLED
Key Slot 2: DISABLED
Key Slot 3: DISABLED
Key Slot 4: DISABLED
Key Slot 5: DISABLED
Key Slot 6: DISABLED
Key Slot 7: DISABLED

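The three checks that follow parse this header: "Version: 1" must match encryption_luks_version: luks1, "MK bits: 512" the 512-bit key size, and the cipher name/mode pair the requested aes-xts-plain64. A sketch of the kind of assertions involved, assuming the dump were registered as a hypothetical luks_dump:

    - name: Assert the LUKS header matches the pool's encryption settings
      ansible.builtin.assert:
        that:
          - luks_dump.stdout is search('Version:\s+1')
          - luks_dump.stdout is search('MK bits:\s+512')
          - luks_dump.stdout is search('Cipher name:\s+aes')
          - luks_dump.stdout is search('Cipher mode:\s+xts-plain64')
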
TASK [Check LUKS version] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:26
Monday 21 October 2024  18:22:53 -0400 (0:00:00.413)       0:01:47.300 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check LUKS key size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:38
Monday 21 October 2024  18:22:53 -0400 (0:00:00.068)       0:01:47.369 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check LUKS cipher] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:50
Monday 21 October 2024  18:22:53 -0400 (0:00:00.051)       0:01:47.421 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate pool member crypttab entries] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Monday 21 October 2024  18:22:53 -0400 (0:00:00.061)       0:01:47.482 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7)

TASK [Set variables used by tests] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Monday 21 October 2024  18:22:53 -0400 (0:00:00.064)       0:01:47.546 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [
            "luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7 /dev/sda -"
        ]
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Monday 21 October 2024  18:22:53 -0400 (0:00:00.046)       0:01:47.593 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Monday 21 October 2024  18:22:53 -0400 (0:00:00.043)       0:01:47.636 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check backing device of crypttab entry] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Monday 21 October 2024  18:22:53 -0400 (0:00:00.043)       0:01:47.680 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Monday 21 October 2024  18:22:53 -0400 (0:00:00.043)       0:01:47.723 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Monday 21 October 2024  18:22:53 -0400 (0:00:00.058)       0:01:47.782 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Monday 21 October 2024  18:22:53 -0400 (0:00:00.032)       0:01:47.815 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:95
Monday 21 October 2024  18:22:53 -0400 (0:00:00.072)       0:01:47.887 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Monday 21 October 2024  18:22:53 -0400 (0:00:00.070)       0:01:47.958 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about VDO deduplication] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Monday 21 October 2024  18:22:53 -0400 (0:00:00.047)       0:01:48.005 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Monday 21 October 2024  18:22:53 -0400 (0:00:00.024)       0:01:48.030 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Monday 21 October 2024  18:22:53 -0400 (0:00:00.023)       0:01:48.053 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Monday 21 October 2024  18:22:54 -0400 (0:00:00.023)       0:01:48.076 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Monday 21 October 2024  18:22:54 -0400 (0:00:00.027)       0:01:48.104 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Monday 21 October 2024  18:22:54 -0400 (0:00:00.042)       0:01:48.147 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Monday 21 October 2024  18:22:54 -0400 (0:00:00.033)       0:01:48.180 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}
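
[NOTE]: All six VDO checks above were skipped because the volume under test sets
'deduplication': None and 'compression': None. A minimal sketch of the pattern,
assuming the lvs output is registered as storage_test_vdo_status (the fact
cleared just above) and a hypothetical per-volume variable storage_test_vol; the
test file's exact expressions may differ:

    # Runs only when the test volume explicitly disables deduplication;
    # otherwise the conditional is false and the task is skipped, as seen here.
    - name: Check if VDO deduplication is off
      ansible.builtin.assert:
        that: "'deduplication' not in storage_test_vdo_status.stdout"
      when: storage_test_vol.deduplication is sameas false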

TASK [Check Stratis] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:98
Monday 21 October 2024  18:22:54 -0400 (0:00:00.040)       0:01:48.220 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:6
Monday 21 October 2024  18:22:54 -0400 (0:00:00.098)       0:01:48.319 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:11
Monday 21 October 2024  18:22:54 -0400 (0:00:00.048)       0:01:48.367 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:15
Monday 21 October 2024  18:22:54 -0400 (0:00:00.039)       0:01:48.406 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:25
Monday 21 October 2024  18:22:54 -0400 (0:00:00.044)       0:01:48.450 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:34
Monday 21 October 2024  18:22:54 -0400 (0:00:00.036)       0:01:48.487 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:44
Monday 21 October 2024  18:22:54 -0400 (0:00:00.051)       0:01:48.538 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}
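
[NOTE]: The Stratis branch is skipped because this pool is LVM. For a Stratis
pool, the first task plausibly reduces to running the CLI and registering its
JSON output under the variable name reset just above:

    - name: Run 'stratis report'
      ansible.builtin.command: stratis report
      register: storage_test_stratis_report
      changed_when: false  # reporting only; never marks the host as changed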

TASK [Clean up test variables] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:101
Monday 21 October 2024  18:22:54 -0400 (0:00:00.053)       0:01:48.591 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Monday 21 October 2024  18:22:54 -0400 (0:00:00.051)       0:01:48.643 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Set storage volume test variables] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:2
Monday 21 October 2024  18:22:54 -0400 (0:00:00.066)       0:01:48.710 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:19
Monday 21 October 2024  18:22:54 -0400 (0:00:00.052)       0:01:48.762 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)
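
[NOTE]: Each item above maps to its own test-verify-volume-<subset>.yml file. A
compact sketch of that fan-out, assuming the loop variable is renamed to match
the templated task name shown above:

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset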

TASK [Get expected mount device based on device type] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:7
Monday 21 October 2024  18:22:54 -0400 (0:00:00.139)       0:01:48.901 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:11
Monday 21 October 2024  18:22:54 -0400 (0:00:00.047)       0:01:48.949 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:19
Monday 21 October 2024  18:22:54 -0400 (0:00:00.070)       0:01:49.019 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:28
Monday 21 October 2024  18:22:54 -0400 (0:00:00.040)       0:01:49.059 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
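
[NOTE]: The assertion above boils down to finding exactly one entry in the
gathered mount facts that matches the device and mount point set earlier. A
sketch using the fact names from "Set some facts" above; the real expression in
the test file may differ:

    - name: Verify the current mount state by device
      ansible.builtin.assert:
        that:
          - >-
            ansible_facts.mounts
            | selectattr('device', 'equalto', storage_test_device_path)
            | selectattr('mount', 'equalto', storage_test_mount_expected_mount_point)
            | list | length == 1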

TASK [Verify mount directory user] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:36
Monday 21 October 2024  18:22:55 -0400 (0:00:00.048)       0:01:49.108 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:42
Monday 21 October 2024  18:22:55 -0400 (0:00:00.044)       0:01:49.153 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:48
Monday 21 October 2024  18:22:55 -0400 (0:00:00.054)       0:01:49.207 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:57
Monday 21 October 2024  18:22:55 -0400 (0:00:00.038)       0:01:49.245 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:63
Monday 21 October 2024  18:22:55 -0400 (0:00:00.026)       0:01:49.271 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:69
Monday 21 October 2024  18:22:55 -0400 (0:00:00.027)       0:01:49.299 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:79
Monday 21 October 2024  18:22:55 -0400 (0:00:00.029)       0:01:49.329 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Monday 21 October 2024  18:22:55 -0400 (0:00:00.045)       0:01:49.374 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "/dev/mapper/foo-test1 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
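
[NOTE]: The *_matches lists above come from filtering /etc/fstab line by line. A
sketch of one of them, assuming the fstab content was registered earlier as a
command result named storage_test_fstab (a name cleaned up in "Clean up variable
namespace" further below) and a per-volume variable storage_test_volume:

    - name: Set some variables for fstab checking
      ansible.builtin.set_fact:
        # Keep every fstab line whose device field is the volume's mount id.
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines
          | select('search', '^' ~ storage_test_volume._mount_id ~ ' ')
          | list }}"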

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Monday 21 October 2024  18:22:55 -0400 (0:00:00.112)       0:01:49.487 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Monday 21 October 2024  18:22:55 -0400 (0:00:00.054)       0:01:49.542 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Monday 21 October 2024  18:22:55 -0400 (0:00:00.063)       0:01:49.605 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Monday 21 October 2024  18:22:55 -0400 (0:00:00.161)       0:01:49.766 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Monday 21 October 2024  18:22:55 -0400 (0:00:00.041)       0:01:49.808 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:6
Monday 21 October 2024  18:22:55 -0400 (0:00:00.038)       0:01:49.847 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:14
Monday 21 October 2024  18:22:55 -0400 (0:00:00.074)       0:01:49.921 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:3
Monday 21 October 2024  18:22:55 -0400 (0:00:00.072)       0:01:49.994 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549365.6152,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1729549361.2521803,
        "dev": 5,
        "device_type": 64769,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 8203,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1729549361.2521803,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
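
[NOTE]: /dev/mapper/foo-test1 is a device-mapper symlink (hence the mimetype
inode/symlink) that resolves to /dev/dm-1; with the link followed it stats as a
block device (isblk: true). A sketch of the task, where follow and the register
name are assumptions:

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"  # /dev/mapper/foo-test1 here
        follow: true  # resolve the dm symlink to the real block node
      register: storage_test_dev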

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:9
Monday 21 October 2024  18:22:56 -0400 (0:00:00.394)       0:01:50.388 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:16
Monday 21 October 2024  18:22:56 -0400 (0:00:00.031)       0:01:50.420 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:23
Monday 21 October 2024  18:22:56 -0400 (0:00:00.024)       0:01:50.444 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:29
Monday 21 October 2024  18:22:56 -0400 (0:00:00.035)       0:01:50.479 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:33
Monday 21 October 2024  18:22:56 -0400 (0:00:00.037)       0:01:50.517 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:38
Monday 21 October 2024  18:22:56 -0400 (0:00:00.038)       0:01:50.555 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Monday 21 October 2024  18:22:56 -0400 (0:00:00.049)       0:01:50.605 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Monday 21 October 2024  18:22:56 -0400 (0:00:00.047)       0:01:50.653 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
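
[NOTE]: "Nothing to do" means cryptsetup was already installed. The task is
almost certainly a plain package install, e.g.:

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present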

TASK [Collect LUKS info for this volume] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Monday 21 October 2024  18:22:57 -0400 (0:00:00.986)       0:01:51.639 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Monday 21 October 2024  18:22:57 -0400 (0:00:00.037)       0:01:51.677 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Monday 21 October 2024  18:22:57 -0400 (0:00:00.057)       0:01:51.734 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Monday 21 October 2024  18:22:57 -0400 (0:00:00.096)       0:01:51.831 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Monday 21 October 2024  18:22:57 -0400 (0:00:00.043)       0:01:51.874 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Monday 21 October 2024  18:22:57 -0400 (0:00:00.059)       0:01:51.934 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Monday 21 October 2024  18:22:57 -0400 (0:00:00.053)       0:01:51.987 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Monday 21 October 2024  18:22:57 -0400 (0:00:00.049)       0:01:52.036 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Monday 21 October 2024  18:22:58 -0400 (0:00:00.052)       0:01:52.089 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Monday 21 October 2024  18:22:58 -0400 (0:00:00.083)       0:01:52.173 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
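
[NOTE]: With encryption disabled on this volume, zero /etc/crypttab entries are
expected (_storage_test_expected_crypttab_entries is "0" above). The check is
essentially a count comparison; a sketch using the facts set two tasks earlier:

    - name: Check for /etc/crypttab entry
      ansible.builtin.assert:
        that:
          - >-
            _storage_test_crypttab_entries | length ==
            _storage_test_expected_crypttab_entries | int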

TASK [Validate the format of the crypttab entry] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Monday 21 October 2024  18:22:58 -0400 (0:00:00.054)       0:01:52.227 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Monday 21 October 2024  18:22:58 -0400 (0:00:00.054)       0:01:52.282 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Monday 21 October 2024  18:22:58 -0400 (0:00:00.059)       0:01:52.341 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Monday 21 October 2024  18:22:58 -0400 (0:00:00.046)       0:01:52.388 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:8
Monday 21 October 2024  18:22:58 -0400 (0:00:00.043)       0:01:52.432 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:14
Monday 21 October 2024  18:22:58 -0400 (0:00:00.044)       0:01:52.476 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:19
Monday 21 October 2024  18:22:58 -0400 (0:00:00.054)       0:01:52.531 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:24
Monday 21 October 2024  18:22:58 -0400 (0:00:00.038)       0:01:52.569 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:29
Monday 21 October 2024  18:22:58 -0400 (0:00:00.040)       0:01:52.610 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:37
Monday 21 October 2024  18:22:58 -0400 (0:00:00.038)       0:01:52.649 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:46
Monday 21 October 2024  18:22:58 -0400 (0:00:00.037)       0:01:52.686 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:54
Monday 21 October 2024  18:22:58 -0400 (0:00:00.047)       0:01:52.733 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:62
Monday 21 October 2024  18:22:58 -0400 (0:00:00.041)       0:01:52.774 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:70
Monday 21 October 2024  18:22:58 -0400 (0:00:00.048)       0:01:52.823 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:3
Monday 21 October 2024  18:22:58 -0400 (0:00:00.050)       0:01:52.873 ******** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:11
Monday 21 October 2024  18:22:59 -0400 (0:00:00.488)       0:01:53.361 ******** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Establish base value for expected size] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:20
Monday 21 October 2024  18:22:59 -0400 (0:00:00.408)       0:01:53.770 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:28
Monday 21 October 2024  18:22:59 -0400 (0:00:00.047)       0:01:53.817 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:32
Monday 21 October 2024  18:22:59 -0400 (0:00:00.029)       0:01:53.847 ******** 
ok: [managed-node2] => {
    "bytes": 10715943403,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:46
Monday 21 October 2024  18:23:00 -0400 (0:00:00.460)       0:01:54.308 ******** 
skipping: [managed-node2] => {}

TASK [Show test blockinfo] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:50
Monday 21 October 2024  18:23:00 -0400 (0:00:00.111)       0:01:54.420 ******** 
skipping: [managed-node2] => {}

TASK [Show test pool size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:54
Monday 21 October 2024  18:23:00 -0400 (0:00:00.103)       0:01:54.523 ******** 
skipping: [managed-node2] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:58
Monday 21 October 2024  18:23:00 -0400 (0:00:00.088)       0:01:54.611 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:67
Monday 21 October 2024  18:23:00 -0400 (0:00:00.076)       0:01:54.687 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:71
Monday 21 October 2024  18:23:00 -0400 (0:00:00.048)       0:01:54.736 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:76
Monday 21 October 2024  18:23:00 -0400 (0:00:00.044)       0:01:54.781 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:82
Monday 21 October 2024  18:23:00 -0400 (0:00:00.040)       0:01:54.822 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:86
Monday 21 October 2024  18:23:00 -0400 (0:00:00.042)       0:01:54.864 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:91
Monday 21 October 2024  18:23:00 -0400 (0:00:00.043)       0:01:54.908 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:96
Monday 21 October 2024  18:23:00 -0400 (0:00:00.041)       0:01:54.950 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:101
Monday 21 October 2024  18:23:00 -0400 (0:00:00.039)       0:01:54.989 ******** 
skipping: [managed-node2] => {}

TASK [Show volume thin pool size] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:105
Monday 21 October 2024  18:23:00 -0400 (0:00:00.044)       0:01:55.034 ******** 
skipping: [managed-node2] => {}

TASK [Show test volume size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:109
Monday 21 October 2024  18:23:01 -0400 (0:00:00.041)       0:01:55.076 ******** 
skipping: [managed-node2] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:113
Monday 21 October 2024  18:23:01 -0400 (0:00:00.045)       0:01:55.121 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:120
Monday 21 October 2024  18:23:01 -0400 (0:00:00.042)       0:01:55.164 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:127
Monday 21 October 2024  18:23:01 -0400 (0:00:00.044)       0:01:55.208 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:131
Monday 21 October 2024  18:23:01 -0400 (0:00:00.041)       0:01:55.249 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:137
Monday 21 October 2024  18:23:01 -0400 (0:00:00.041)       0:01:55.291 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:143
Monday 21 October 2024  18:23:01 -0400 (0:00:00.041)       0:01:55.332 ******** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:147
Monday 21 October 2024  18:23:01 -0400 (0:00:00.052)       0:01:55.384 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:151
Monday 21 October 2024  18:23:01 -0400 (0:00:00.048)       0:01:55.432 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed
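
[NOTE]: The two sizes agree by construction: the requested "4g" is 4 GiB =
4 * 1024^3 = 4294967296 bytes, exactly the LV's actual size. The assertion is an
integer comparison over the two values shown above, e.g.:

    - name: Assert expected size is actual size
      ansible.builtin.assert:
        that:
          - (storage_test_expected_size | int) == (storage_test_actual_size.bytes | int)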

TASK [Get information about the LV] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:5
Monday 21 October 2024  18:23:01 -0400 (0:00:00.110)       0:01:55.543 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.031403",
    "end": "2024-10-21 18:23:01.958871",
    "rc": 0,
    "start": "2024-10-21 18:23:01.927468"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear

TASK [Set LV segment type] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:13
Monday 21 October 2024  18:23:02 -0400 (0:00:00.580)       0:01:56.123 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}
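
[NOTE]: The set_fact above parses the LVM2_* name/value pairs printed by lvs.
One hedged way to extract every segment type, assuming the command result was
registered as lvs_output (a hypothetical name):

    - name: Set LV segment type
      ansible.builtin.set_fact:
        storage_test_lv_segtype: "{{ lvs_output.stdout | regex_findall('LVM2_SEGTYPE=(\\S+)') }}"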

TASK [Check segment type] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:17
Monday 21 October 2024  18:23:02 -0400 (0:00:00.092)       0:01:56.216 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:24
Monday 21 October 2024  18:23:02 -0400 (0:00:00.072)       0:01:56.288 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:31
Monday 21 October 2024  18:23:02 -0400 (0:00:00.060)       0:01:56.348 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:37
Monday 21 October 2024  18:23:02 -0400 (0:00:00.060)       0:01:56.408 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:42
Monday 21 October 2024  18:23:02 -0400 (0:00:00.057)       0:01:56.466 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:25
Monday 21 October 2024  18:23:02 -0400 (0:00:00.061)       0:01:56.527 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:44
Monday 21 October 2024  18:23:02 -0400 (0:00:00.055)       0:01:56.583 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:54
Monday 21 October 2024  18:23:02 -0400 (0:00:00.034)       0:01:56.617 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Create a file] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/create-test-file.yml:12
Monday 21 October 2024  18:23:02 -0400 (0:00:00.039)       0:01:56.657 ******** 
changed: [managed-node2] => {
    "changed": true,
    "dest": "/opt/test1/quux",
    "gid": 0,
    "group": "root",
    "mode": "0644",
    "owner": "root",
    "secontext": "unconfined_u:object_r:unlabeled_t:s0",
    "size": 0,
    "state": "file",
    "uid": 0
}

TASK [Change the mountpoint, leaving encryption in place] **********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:237
Monday 21 October 2024  18:23:03 -0400 (0:00:00.476)       0:01:57.134 ******** 

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:2
Monday 21 October 2024  18:23:03 -0400 (0:00:00.082)       0:01:57.217 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Ensure ansible_facts used by role] ****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 21 October 2024  18:23:03 -0400 (0:00:00.048)       0:01:57.265 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 21 October 2024  18:23:03 -0400 (0:00:00.044)       0:01:57.309 ******** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=RedHat_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/vars/RedHat_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.yml"
}
skipping: [managed-node2] => (item=RedHat_9.5.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.5.yml",
    "skip_reason": "Conditional result was False"
}
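
[NOTE]: The loop above tries increasingly specific vars files and loads each one
that exists; here only RedHat_9.yml is found. The two RedHat.yml skips are not a
duplicate: on RHEL both the OS family and the distribution resolve to "RedHat".
A common shape for such a task (a sketch, not the role's exact code):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ item }}"
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
      when: (role_path ~ '/vars/' ~ item) is file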

TASK [redhat.rhel_system_roles.storage : Check if system is ostree] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 21 October 2024  18:23:03 -0400 (0:00:00.056)       0:01:57.366 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 21 October 2024  18:23:03 -0400 (0:00:00.028)       0:01:57.394 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:5
Monday 21 October 2024  18:23:03 -0400 (0:00:00.029)       0:01:57.424 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:9
Monday 21 October 2024  18:23:03 -0400 (0:00:00.032)       0:01:57.456 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Include the appropriate provider tasks] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:13
Monday 21 October 2024  18:23:03 -0400 (0:00:00.031)       0:01:57.488 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Make sure blivet is available] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 21 October 2024  18:23:03 -0400 (0:00:00.087)       0:01:57.576 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Show storage_pools] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 21 October 2024  18:23:03 -0400 (0:00:00.069)       0:01:57.646 ******** 
ok: [managed-node2] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test2",
                    "name": "test1"
                }
            ]
        }
    ]
}
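
[NOTE]: This storage_pools value is the role input for the "Change the
mountpoint, leaving encryption in place" step: only mount_point moves to
/opt/test2, so the existing LUKS settings on pool "foo" are left untouched. The
calling play plausibly looks close to this sketch:

    - name: Change the mountpoint, leaving encryption in place
      ansible.builtin.include_role:
        name: redhat.rhel_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks: [sda]
            volumes:
              - name: test1
                mount_point: /opt/test2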

TASK [redhat.rhel_system_roles.storage : Show storage_volumes] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 21 October 2024  18:23:03 -0400 (0:00:00.049)       0:01:57.695 ******** 
ok: [managed-node2] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined. 'storage_volumes' is undefined"
}

TASK [redhat.rhel_system_roles.storage : Get required packages] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 21 October 2024  18:23:03 -0400 (0:00:00.043)       0:01:57.738 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Enable copr repositories if needed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 21 October 2024  18:23:03 -0400 (0:00:00.046)       0:01:57.784 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Make sure required packages are installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 21 October 2024  18:23:03 -0400 (0:00:00.044)       0:01:57.829 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Get service facts] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 21 October 2024  18:23:03 -0400 (0:00:00.045)       0:01:57.875 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set storage_cryptsetup_services] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 21 October 2024  18:23:03 -0400 (0:00:00.049)       0:01:57.924 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 21 October 2024  18:23:03 -0400 (0:00:00.082)       0:01:58.007 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 21 October 2024  18:23:03 -0400 (0:00:00.030)       0:01:58.037 ******** 
ok: [managed-node2] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/xvda3",
        "/dev/xvda4",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "path": "/opt/test1",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test2",
            "src": "/dev/mapper/foo-test1",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "dosfstools",
        "cryptsetup",
        "lvm2"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks1",
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 4294967296,
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
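
For context: this run reports "actions": [] — no device changes were made; only the mount moves from /opt/test1 to /opt/test2. A minimal sketch of the kind of role invocation that yields this result (values taken from the output above; not the test's literal task):

    - name: Re-run the storage role, moving the mount without restating encryption
      ansible.builtin.include_role:
        name: redhat.rhel_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            volumes:
              - name: test1
                size: 4g
                fs_type: xfs
                mount_point: /opt/test2

The pool's existing LUKS settings (encryption: true, luks1) are detected and carried forward even though they are not restated here, which is exactly what the later "implicitly preserve encryption" assertion checks.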

TASK [redhat.rhel_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 21 October 2024  18:23:05 -0400 (0:00:01.502)       0:01:59.540 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if /etc/fstab is present] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 21 October 2024  18:23:05 -0400 (0:00:00.064)       0:01:59.604 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549363.9541924,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "0d495c4508d8f74dc9d1b472b6e1b1d638f6ad31",
        "ctime": 1729549363.9531925,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 822083726,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1729549363.9531925,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1173,
        "uid": 0,
        "version": "1383220658",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [redhat.rhel_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 21 October 2024  18:23:05 -0400 (0:00:00.393)       0:01:59.997 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 21 October 2024  18:23:05 -0400 (0:00:00.037)       0:02:00.035 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Show blivet_output] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 21 October 2024  18:23:05 -0400 (0:00:00.031)       0:02:00.066 ******** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/xvda3",
            "/dev/xvda4",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "path": "/opt/test1",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test2",
                "src": "/dev/mapper/foo-test1",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "dosfstools",
            "cryptsetup",
            "lvm2"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks1",
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 4294967296,
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [redhat.rhel_system_roles.storage : Set the list of pools for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 21 October 2024  18:23:06 -0400 (0:00:00.049)       0:02:00.116 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks1",
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 4294967296,
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Set the list of volumes for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 21 October 2024  18:23:06 -0400 (0:00:00.034)       0:02:00.150 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Remove obsolete mounts] ***************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 21 October 2024  18:23:06 -0400 (0:00:00.029)       0:02:00.180 ******** 
changed: [managed-node2] => (item={'path': '/opt/test1', 'state': 'absent'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "mount_info": {
        "path": "/opt/test1",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0"
}
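
The return fields here (fstab, backup_file, passno, ...) are those of the mount module; the role loops over blivet_output.mounts. A standalone equivalent of this loop iteration, assuming ansible.posix.mount:

    - name: Unmount /opt/test1 and drop its /etc/fstab entry
      ansible.posix.mount:
        path: /opt/test1
        state: absent   # unmounts the path and removes its fstab line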

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 21 October 2024  18:23:06 -0400 (0:00:00.456)       0:02:00.636 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}
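
The null name and empty status show this is a bare daemon-reload rather than a unit start/stop: systemd regenerates its .mount units from the freshly edited /etc/fstab. An equivalent sketch:

    - name: Reload systemd to pick up the changed /etc/fstab
      ansible.builtin.systemd:
        daemon_reload: true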

TASK [redhat.rhel_system_roles.storage : Set up new/current mounts] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 21 October 2024  18:23:07 -0400 (0:00:00.734)       0:02:01.371 ******** 
changed: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
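
The counterpart to the removal above: the same module with state: mounted both mounts the volume and persists the entry. A standalone sketch, again assuming ansible.posix.mount:

    - name: Mount the logical volume and persist it in /etc/fstab
      ansible.posix.mount:
        src: /dev/mapper/foo-test1
        path: /opt/test2
        fstype: xfs
        opts: defaults
        state: mounted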

TASK [redhat.rhel_system_roles.storage : Manage mount ownership/permissions] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 21 October 2024  18:23:07 -0400 (0:00:00.473)       0:02:01.845 ******** 
skipping: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 21 October 2024  18:23:07 -0400 (0:00:00.051)       0:02:01.896 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [redhat.rhel_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 21 October 2024  18:23:08 -0400 (0:00:00.695)       0:02:02.592 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549365.6252,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "cb36f850ef5d183530e7e4584114c5334485267a",
        "ctime": 1729549365.5961998,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 872415378,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0600",
        "mtime": 1729549365.5961998,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 53,
        "uid": 0,
        "version": "468128933",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [redhat.rhel_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 21 October 2024  18:23:08 -0400 (0:00:00.385)       0:02:02.978 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Update facts] *************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 21 October 2024  18:23:08 -0400 (0:00:00.038)       0:02:03.016 ******** 
ok: [managed-node2]

TASK [Assert to implicitly preserve encryption on existing pool] ***************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:249
Monday 21 October 2024  18:23:09 -0400 (0:00:00.966)       0:02:03.982 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the file] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-data-preservation.yml:11
Monday 21 October 2024  18:23:09 -0400 (0:00:00.028)       0:02:04.010 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549382.9832778,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1729549382.9832778,
        "dev": 64769,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 131,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0644",
        "mtime": 1729549382.9832778,
        "nlink": 1,
        "path": "/opt/test2/quux",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1307008256",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
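
The checksum da39a3ee5e6b4b0d3255bfef95601890afd80709 is the SHA-1 of empty input, so /opt/test2/quux came through the re-mount with its original (empty) contents intact. A sketch of the assertion that follows, assuming the stat result was registered as stat_r (a hypothetical name):

    - name: Assert the test file survived the role run
      ansible.builtin.assert:
        that:
          - stat_r.stat.exists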

TASK [Assert file presence] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-data-preservation.yml:16
Monday 21 October 2024  18:23:10 -0400 (0:00:00.358)       0:02:04.369 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify role results] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:259
Monday 21 October 2024  18:23:10 -0400 (0:00:00.033)       0:02:04.403 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:2
Monday 21 October 2024  18:23:10 -0400 (0:00:00.048)       0:02:04.451 ******** 
ok: [managed-node2] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks1",
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 4294967296,
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:7
Monday 21 October 2024  18:23:10 -0400 (0:00:00.043)       0:02:04.495 ******** 
skipping: [managed-node2] => {}

TASK [Collect info about the volumes.] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:15
Monday 21 October 2024  18:23:10 -0400 (0:00:00.035)       0:02:04.530 ******** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test2",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "6cc18fb1-5dbc-4cae-80cb-22fca6458d6d"
        },
        "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "size": "10G",
            "type": "crypt",
            "uuid": "nSBQvS-xreE-rXgK-O8WW-JK9I-ShAv-3U0VhC"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "vfat",
            "label": "",
            "mountpoint": "/boot/efi",
            "name": "/dev/xvda2",
            "size": "200M",
            "type": "partition",
            "uuid": "7B77-95E7"
        },
        "/dev/xvda3": {
            "fstype": "xfs",
            "label": "boot",
            "mountpoint": "/boot",
            "name": "/dev/xvda3",
            "size": "1G",
            "type": "partition",
            "uuid": "a8cc2a47-4cf2-4d6f-8916-f69641ec5919"
        },
        "/dev/xvda4": {
            "fstype": "xfs",
            "label": "root",
            "mountpoint": "/",
            "name": "/dev/xvda4",
            "size": "248.8G",
            "type": "partition",
            "uuid": "1b4086e3-4d44-4b6e-99dc-43b96b9fea96"
        }
    }
}
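
This info mapping is per-device filesystem data of the kind lsblk/blkid report; note that /dev/sda shows fstype crypto_LUKS while the opened mapping carries the LVM2_member signature. Roughly the same data can be gathered by hand (illustrative task, not the test's module):

    - name: Collect comparable block device info
      ansible.builtin.command:
        cmd: lsblk -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: blockdev_info
      changed_when: false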

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:20
Monday 21 October 2024  18:23:10 -0400 (0:00:00.365)       0:02:04.896 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002947",
    "end": "2024-10-21 18:23:11.141134",
    "rc": 0,
    "start": "2024-10-21 18:23:11.138187"
}

STDOUT:

UUID=1b4086e3-4d44-4b6e-99dc-43b96b9fea96	/	xfs	defaults	0	0
UUID=a8cc2a47-4cf2-4d6f-8916-f69641ec5919	/boot	xfs	defaults	0	0
UUID=7B77-95E7	/boot/efi	vfat	defaults,uid=0,gid=0,umask=077,shortname=winnt	0	2
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
# system_role:storage
/dev/mapper/foo-test1 /opt/test2 xfs defaults 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:25
Monday 21 October 2024  18:23:11 -0400 (0:00:00.372)       0:02:05.268 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002946",
    "end": "2024-10-21 18:23:11.490545",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-10-21 18:23:11.487599"
}

STDOUT:

luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7 /dev/sda -
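
The crypttab line maps the luks-<UUID> device-mapper name to /dev/sda, with "-" in the key-file column, i.e. the passphrase is supplied at unlock time rather than read from a file on disk. The entry can be cross-checked against the live mapping (illustrative, not part of the test):

    - name: Show the active LUKS mapping for the crypttab entry
      ansible.builtin.command:
        cmd: cryptsetup status luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7
      changed_when: false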

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:34
Monday 21 October 2024  18:23:11 -0400 (0:00:00.371)       0:02:05.639 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml for managed-node2 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks1', 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:5
Monday 21 October 2024  18:23:11 -0400 (0:00:00.084)       0:02:05.724 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:18
Monday 21 October 2024  18:23:11 -0400 (0:00:00.035)       0:02:05.759 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.028122",
    "end": "2024-10-21 18:23:12.017717",
    "rc": 0,
    "start": "2024-10-21 18:23:11.989595"
}

STDOUT:

        0
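
vgs --binary prints 1 for a shared (lvmlockd) volume group and 0 otherwise, so the assertion that follows only needs to compare the trimmed stdout. A sketch, assuming the command result was registered as vg_shared (hypothetical name):

    - name: Verify the VG is not a shared volume group
      ansible.builtin.assert:
        that:
          - vg_shared.stdout | trim == '0'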

TASK [Verify that VG shared value checks out] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:24
Monday 21 October 2024  18:23:12 -0400 (0:00:00.397)       0:02:06.157 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool.yml:34
Monday 21 October 2024  18:23:12 -0400 (0:00:00.097)       0:02:06.255 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml for managed-node2 => (item=members)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node2 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:2
Monday 21 October 2024  18:23:12 -0400 (0:00:00.051)       0:02:06.307 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:8
Monday 21 October 2024  18:23:12 -0400 (0:00:00.051)       0:02:06.358 ******** 
ok: [managed-node2] => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
    "pv": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
}

TASK [Set pvs lvm length] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:17
Monday 21 October 2024  18:23:12 -0400 (0:00:00.358)       0:02:06.717 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:22
Monday 21 October 2024  18:23:12 -0400 (0:00:00.044)       0:02:06.761 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:27
Monday 21 October 2024  18:23:12 -0400 (0:00:00.062)       0:02:06.824 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:36
Monday 21 October 2024  18:23:12 -0400 (0:00:00.082)       0:02:06.906 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "crypt"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:41
Monday 21 October 2024  18:23:12 -0400 (0:00:00.044)       0:02:06.950 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:46
Monday 21 October 2024  18:23:12 -0400 (0:00:00.033)       0:02:06.984 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:51
Monday 21 October 2024  18:23:12 -0400 (0:00:00.029)       0:02:07.013 ******** 
ok: [managed-node2] => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:64
Monday 21 October 2024  18:23:12 -0400 (0:00:00.043)       0:02:07.057 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0
}

STDOUT:


** (process:182566): WARNING **: 18:23:13.221: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory
True



STDERR:

Shared connection to 10.31.8.235 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:73
Monday 21 October 2024  18:23:13 -0400 (0:00:00.388)       0:02:07.446 ******** 
skipping: [managed-node2] => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
}
skipping: [managed-node2] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:83
Monday 21 October 2024  18:23:13 -0400 (0:00:00.052)       0:02:07.498 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml for managed-node2

TASK [Get information about RAID] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:8
Monday 21 October 2024  18:23:13 -0400 (0:00:00.059)       0:02:07.557 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:14
Monday 21 October 2024  18:23:13 -0400 (0:00:00.024)       0:02:07.582 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:19
Monday 21 October 2024  18:23:13 -0400 (0:00:00.023)       0:02:07.606 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:24
Monday 21 October 2024  18:23:13 -0400 (0:00:00.023)       0:02:07.629 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:29
Monday 21 October 2024  18:23:13 -0400 (0:00:00.024)       0:02:07.654 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:37
Monday 21 October 2024  18:23:13 -0400 (0:00:00.023)       0:02:07.678 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:46
Monday 21 October 2024  18:23:13 -0400 (0:00:00.023)       0:02:07.702 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:55
Monday 21 October 2024  18:23:13 -0400 (0:00:00.023)       0:02:07.725 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:64
Monday 21 October 2024  18:23:13 -0400 (0:00:00.024)       0:02:07.750 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:74
Monday 21 October 2024  18:23:13 -0400 (0:00:00.023)       0:02:07.774 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-md.yml:83
Monday 21 October 2024  18:23:13 -0400 (0:00:00.022)       0:02:07.796 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:86
Monday 21 October 2024  18:23:13 -0400 (0:00:00.024)       0:02:07.821 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node2

TASK [Validate pool member LVM RAID settings] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Monday 21 October 2024  18:23:13 -0400 (0:00:00.046)       0:02:07.868 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about the LV] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Monday 21 October 2024  18:23:13 -0400 (0:00:00.044)       0:02:07.912 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Monday 21 October 2024  18:23:13 -0400 (0:00:00.033)       0:02:07.945 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Monday 21 October 2024  18:23:13 -0400 (0:00:00.039)       0:02:07.985 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Monday 21 October 2024  18:23:13 -0400 (0:00:00.028)       0:02:08.013 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Monday 21 October 2024  18:23:13 -0400 (0:00:00.031)       0:02:08.045 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Monday 21 October 2024  18:23:14 -0400 (0:00:00.037)       0:02:08.082 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Monday 21 October 2024  18:23:14 -0400 (0:00:00.050)       0:02:08.133 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:89
Monday 21 October 2024  18:23:14 -0400 (0:00:00.038)       0:02:08.171 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node2

TASK [Validate pool member thinpool settings] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-thin.yml:2
Monday 21 October 2024  18:23:14 -0400 (0:00:00.058)       0:02:08.230 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about thinpool] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:8
Monday 21 October 2024  18:23:14 -0400 (0:00:00.109)       0:02:08.340 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:16
Monday 21 October 2024  18:23:14 -0400 (0:00:00.025)       0:02:08.366 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:22
Monday 21 October 2024  18:23:14 -0400 (0:00:00.023)       0:02:08.389 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-thin.yml:26
Monday 21 October 2024  18:23:14 -0400 (0:00:00.025)       0:02:08.414 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:92
Monday 21 October 2024  18:23:14 -0400 (0:00:00.026)       0:02:08.441 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node2

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Monday 21 October 2024  18:23:14 -0400 (0:00:00.059)       0:02:08.500 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Monday 21 October 2024  18:23:14 -0400 (0:00:00.057)       0:02:08.558 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml for managed-node2 => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7)

TASK [Get the backing device path] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:2
Monday 21 October 2024  18:23:14 -0400 (0:00:00.074)       0:02:08.632 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/disk/by-uuid/a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
    ],
    "delta": "0:00:00.002933",
    "end": "2024-10-21 18:23:14.900468",
    "rc": 0,
    "start": "2024-10-21 18:23:14.897535"
}

STDOUT:

/dev/sda
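
The task above resolves the /dev/disk/by-uuid symlink to find the raw device backing the LUKS mapping; here it resolves to /dev/sda. A minimal sketch of such a lookup (the variable name luks_uuid and the register name are illustrative, not the test's actual code):

    - name: Get the backing device path
      ansible.builtin.command: realpath /dev/disk/by-uuid/{{ luks_uuid }}
      register: storage_test_backing_device
      changed_when: false  # read-only lookup; never report a change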

TASK [Ensure cryptsetup is present] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:12
Monday 21 October 2024  18:23:14 -0400 (0:00:00.400)       0:02:09.033 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this member] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:18
Monday 21 October 2024  18:23:15 -0400 (0:00:00.813)       0:02:09.846 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cryptsetup",
        "luksDump",
        "/dev/sda"
    ],
    "delta": "0:00:00.006209",
    "end": "2024-10-21 18:23:16.155861",
    "rc": 0,
    "start": "2024-10-21 18:23:16.149652"
}

STDOUT:

LUKS header information for /dev/sda

Version:       	1
Cipher name:   	aes
Cipher mode:   	xts-plain64
Hash spec:     	sha256
Payload offset:	16384
MK bits:       	512
MK digest:     	ce 2a 99 3b b9 20 2a 25 a1 d4 b0 b4 fd 92 ee ab a3 eb 49 c4 
MK salt:       	70 a3 2c 58 66 17 37 08 3d dd b9 79 1c 84 36 00 
               	7d 33 a9 40 7b 11 10 c7 0d 76 89 93 be 6f cf 5d 
MK iterations: 	106910
UUID:          	a1c15428-f81f-4ba5-b6d8-cb21453ee5a7

Key Slot 0: ENABLED
	Iterations:         	1710564
	Salt:               	1a 7e 96 12 d4 fc 6d 93 0e 80 f2 cf d0 6b 13 db 
	                      	95 dd ca 05 5e 24 90 09 ea fb b3 f9 45 33 0b 8c 
	Key material offset:	8
	AF stripes:            	4000
Key Slot 1: DISABLED
Key Slot 2: DISABLED
Key Slot 3: DISABLED
Key Slot 4: DISABLED
Key Slot 5: DISABLED
Key Slot 6: DISABLED
Key Slot 7: DISABLED
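
The dump shows a LUKS1 header (Version: 1) on /dev/sda using aes in xts-plain64 mode with a 512-bit master key, and only key slot 0 enabled. The next task asserts the version by matching this output; a rough sketch, assuming the dump was registered as luks_dump (hypothetical name):

    - name: Check LUKS version
      ansible.builtin.assert:
        that:
          - luks_dump.stdout is search('Version:\s+1')  # LUKS1 expected for this pool
        msg: Unexpected LUKS version in luksDump output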

TASK [Check LUKS version] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:26
Monday 21 October 2024  18:23:16 -0400 (0:00:00.489)       0:02:10.336 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check LUKS key size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:38
Monday 21 October 2024  18:23:16 -0400 (0:00:00.067)       0:02:10.404 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-encryption.yml:50
Monday 21 October 2024  18:23:16 -0400 (0:00:00.037)       0:02:10.441 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Monday 21 October 2024  18:23:16 -0400 (0:00:00.032)       0:02:10.473 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node2 => (item=/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7)

TASK [Set variables used by tests] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Monday 21 October 2024  18:23:16 -0400 (0:00:00.063)       0:02:10.537 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [
            "luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7 /dev/sda -"
        ]
    },
    "changed": false
}
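
Each /etc/crypttab line has the form <name> <device> <key file> [options], where a key file of "-" means no key file (a passphrase prompt at unlock time). The entry recorded above therefore corresponds to a crypttab line like:

    luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7 /dev/sda -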

TASK [Check for /etc/crypttab entry] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Monday 21 October 2024  18:23:16 -0400 (0:00:00.073)       0:02:10.610 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Monday 21 October 2024  18:23:16 -0400 (0:00:00.055)       0:02:10.666 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check backing device of crypttab entry] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Monday 21 October 2024  18:23:16 -0400 (0:00:00.079)       0:02:10.746 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Monday 21 October 2024  18:23:16 -0400 (0:00:00.058)       0:02:10.804 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Monday 21 October 2024  18:23:16 -0400 (0:00:00.056)       0:02:10.860 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Monday 21 October 2024  18:23:16 -0400 (0:00:00.033)       0:02:10.894 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:95
Monday 21 October 2024  18:23:16 -0400 (0:00:00.031)       0:02:10.925 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node2

TASK [Validate pool member VDO settings] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Monday 21 October 2024  18:23:16 -0400 (0:00:00.070)       0:02:10.996 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about VDO deduplication] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Monday 21 October 2024  18:23:16 -0400 (0:00:00.062)       0:02:11.058 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Monday 21 October 2024  18:23:17 -0400 (0:00:00.039)       0:02:11.098 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Monday 21 October 2024  18:23:17 -0400 (0:00:00.039)       0:02:11.137 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Monday 21 October 2024  18:23:17 -0400 (0:00:00.056)       0:02:11.193 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Monday 21 October 2024  18:23:17 -0400 (0:00:00.062)       0:02:11.256 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Monday 21 October 2024  18:23:17 -0400 (0:00:00.047)       0:02:11.303 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Monday 21 October 2024  18:23:17 -0400 (0:00:00.053)       0:02:11.357 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:98
Monday 21 October 2024  18:23:17 -0400 (0:00:00.060)       0:02:11.417 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml for managed-node2

TASK [Run 'stratis report'] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:6
Monday 21 October 2024  18:23:17 -0400 (0:00:00.090)       0:02:11.508 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:11
Monday 21 October 2024  18:23:17 -0400 (0:00:00.031)       0:02:11.540 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:15
Monday 21 October 2024  18:23:17 -0400 (0:00:00.027)       0:02:11.567 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:25
Monday 21 October 2024  18:23:17 -0400 (0:00:00.083)       0:02:11.651 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:34
Monday 21 October 2024  18:23:17 -0400 (0:00:00.031)       0:02:11.682 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-pool-stratis.yml:44
Monday 21 October 2024  18:23:17 -0400 (0:00:00.036)       0:02:11.719 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-members.yml:101
Monday 21 October 2024  18:23:17 -0400 (0:00:00.038)       0:02:11.757 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Monday 21 October 2024  18:23:17 -0400 (0:00:00.042)       0:02:11.800 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Set storage volume test variables] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:2
Monday 21 October 2024  18:23:17 -0400 (0:00:00.080)       0:02:11.880 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}
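
The _storage_volume_tests list drives the next task, which includes one test-verify-volume-<subset>.yml file per entry; that is why eight include lines follow. A plausible sketch of the loop (assuming include_tasks with a custom loop variable, as the task name suggests):

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset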

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:19
Monday 21 October 2024  18:23:17 -0400 (0:00:00.085)       0:02:11.966 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:7
Monday 21 October 2024  18:23:18 -0400 (0:00:00.192)       0:02:12.159 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:11
Monday 21 October 2024  18:23:18 -0400 (0:00:00.039)       0:02:12.198 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test2",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:19
Monday 21 October 2024  18:23:18 -0400 (0:00:00.043)       0:02:12.242 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:28
Monday 21 October 2024  18:23:18 -0400 (0:00:00.025)       0:02:12.267 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:36
Monday 21 October 2024  18:23:18 -0400 (0:00:00.030)       0:02:12.298 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:42
Monday 21 October 2024  18:23:18 -0400 (0:00:00.022)       0:02:12.321 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:48
Monday 21 October 2024  18:23:18 -0400 (0:00:00.027)       0:02:12.349 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:57
Monday 21 October 2024  18:23:18 -0400 (0:00:00.031)       0:02:12.380 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:63
Monday 21 October 2024  18:23:18 -0400 (0:00:00.035)       0:02:12.416 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:69
Monday 21 October 2024  18:23:18 -0400 (0:00:00.036)       0:02:12.452 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:79
Monday 21 October 2024  18:23:18 -0400 (0:00:00.036)       0:02:12.489 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Monday 21 October 2024  18:23:18 -0400 (0:00:00.040)       0:02:12.530 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "/dev/mapper/foo-test1 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test2 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test2 "
        ]
    },
    "changed": false
}
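
These match lists are consistent with a single six-field /etc/fstab entry (device, mount point, filesystem type, options, dump, passno), approximately as follows (the trailing dump/passno values of 0 0 are assumed here, not shown in the log):

    /dev/mapper/foo-test1 /opt/test2 xfs defaults 0 0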

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Monday 21 October 2024  18:23:18 -0400 (0:00:00.112)       0:02:12.643 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Monday 21 October 2024  18:23:18 -0400 (0:00:00.084)       0:02:12.727 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Monday 21 October 2024  18:23:18 -0400 (0:00:00.080)       0:02:12.808 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Monday 21 October 2024  18:23:18 -0400 (0:00:00.056)       0:02:12.864 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Monday 21 October 2024  18:23:18 -0400 (0:00:00.045)       0:02:12.910 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:6
Monday 21 October 2024  18:23:18 -0400 (0:00:00.036)       0:02:12.946 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:14
Monday 21 October 2024  18:23:18 -0400 (0:00:00.055)       0:02:13.002 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:3
Monday 21 October 2024  18:23:18 -0400 (0:00:00.046)       0:02:13.048 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549387.7402992,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1729549361.2521803,
        "dev": 5,
        "device_type": 64769,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 8203,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1729549361.2521803,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
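
Note the stat result: mimetype is inode/symlink because /dev/mapper/foo-test1 is a device-mapper symlink, while islnk is false and isblk is true, which suggests the stat dereferenced the link to the underlying block device (dm-1). A minimal sketch of such a check (follow: true is inferred from the output, since ansible.builtin.stat does not follow symlinks by default):

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: /dev/mapper/foo-test1
        follow: true  # dereference the dm symlink so isblk reflects the real node
      register: storage_test_dev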

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:9
Monday 21 October 2024  18:23:19 -0400 (0:00:00.451)       0:02:13.500 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:16
Monday 21 October 2024  18:23:19 -0400 (0:00:00.058)       0:02:13.558 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:23
Monday 21 October 2024  18:23:19 -0400 (0:00:00.038)       0:02:13.597 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:29
Monday 21 October 2024  18:23:19 -0400 (0:00:00.040)       0:02:13.637 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:33
Monday 21 October 2024  18:23:19 -0400 (0:00:00.035)       0:02:13.673 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:38
Monday 21 October 2024  18:23:19 -0400 (0:00:00.029)       0:02:13.702 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Monday 21 October 2024  18:23:19 -0400 (0:00:00.038)       0:02:13.741 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Monday 21 October 2024  18:23:19 -0400 (0:00:00.022)       0:02:13.764 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Monday 21 October 2024  18:23:20 -0400 (0:00:00.832)       0:02:14.596 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Monday 21 October 2024  18:23:20 -0400 (0:00:00.073)       0:02:14.670 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Monday 21 October 2024  18:23:20 -0400 (0:00:00.136)       0:02:14.807 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Monday 21 October 2024  18:23:20 -0400 (0:00:00.101)       0:02:14.909 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Monday 21 October 2024  18:23:20 -0400 (0:00:00.043)       0:02:14.952 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Monday 21 October 2024  18:23:20 -0400 (0:00:00.044)       0:02:14.997 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Monday 21 October 2024  18:23:20 -0400 (0:00:00.041)       0:02:15.039 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Monday 21 October 2024  18:23:21 -0400 (0:00:00.043)       0:02:15.082 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Monday 21 October 2024  18:23:21 -0400 (0:00:00.042)       0:02:15.125 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Monday 21 October 2024  18:23:21 -0400 (0:00:00.115)       0:02:15.241 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Monday 21 October 2024  18:23:21 -0400 (0:00:00.121)       0:02:15.362 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Monday 21 October 2024  18:23:21 -0400 (0:00:00.077)       0:02:15.439 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Monday 21 October 2024  18:23:21 -0400 (0:00:00.077)       0:02:15.517 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Monday 21 October 2024  18:23:21 -0400 (0:00:00.061)       0:02:15.579 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:8
Monday 21 October 2024  18:23:21 -0400 (0:00:00.041)       0:02:15.620 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:14
Monday 21 October 2024  18:23:21 -0400 (0:00:00.039)       0:02:15.660 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:19
Monday 21 October 2024  18:23:21 -0400 (0:00:00.035)       0:02:15.695 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:24
Monday 21 October 2024  18:23:21 -0400 (0:00:00.029)       0:02:15.725 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:29
Monday 21 October 2024  18:23:21 -0400 (0:00:00.025)       0:02:15.750 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:37
Monday 21 October 2024  18:23:21 -0400 (0:00:00.032)       0:02:15.782 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:46
Monday 21 October 2024  18:23:21 -0400 (0:00:00.032)       0:02:15.815 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:54
Monday 21 October 2024  18:23:21 -0400 (0:00:00.028)       0:02:15.843 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:62
Monday 21 October 2024  18:23:21 -0400 (0:00:00.025)       0:02:15.869 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:70
Monday 21 October 2024  18:23:21 -0400 (0:00:00.025)       0:02:15.894 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:3
Monday 21 October 2024  18:23:21 -0400 (0:00:00.022)       0:02:15.917 ******** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:11
Monday 21 October 2024  18:23:22 -0400 (0:00:00.403)       0:02:16.321 ******** 
ok: [managed-node2] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
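
Actual and requested sizes agree because the volume was requested in bytes: 4 GiB = 4 x 1024^3 bytes = 4294967296 bytes, which LVM renders as "4g" and parted as "4GiB".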

TASK [Establish base value for expected size] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:20
Monday 21 October 2024  18:23:22 -0400 (0:00:00.436)       0:02:16.757 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:28
Monday 21 October 2024  18:23:22 -0400 (0:00:00.052)       0:02:16.809 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:32
Monday 21 October 2024  18:23:22 -0400 (0:00:00.036)       0:02:16.846 ******** 
ok: [managed-node2] => {
    "bytes": 10715943403,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:46
Monday 21 October 2024  18:23:23 -0400 (0:00:00.458)       0:02:17.304 ******** 
skipping: [managed-node2] => {}

TASK [Show test blockinfo] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:50
Monday 21 October 2024  18:23:23 -0400 (0:00:00.063)       0:02:17.368 ******** 
skipping: [managed-node2] => {}

TASK [Show test pool size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:54
Monday 21 October 2024  18:23:23 -0400 (0:00:00.106)       0:02:17.474 ******** 
skipping: [managed-node2] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:58
Monday 21 October 2024  18:23:23 -0400 (0:00:00.061)       0:02:17.536 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:67
Monday 21 October 2024  18:23:23 -0400 (0:00:00.060)       0:02:17.597 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:71
Monday 21 October 2024  18:23:23 -0400 (0:00:00.037)       0:02:17.635 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:76
Monday 21 October 2024  18:23:23 -0400 (0:00:00.035)       0:02:17.670 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:82
Monday 21 October 2024  18:23:23 -0400 (0:00:00.042)       0:02:17.713 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:86
Monday 21 October 2024  18:23:23 -0400 (0:00:00.039)       0:02:17.752 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:91
Monday 21 October 2024  18:23:23 -0400 (0:00:00.040)       0:02:17.792 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:96
Monday 21 October 2024  18:23:23 -0400 (0:00:00.041)       0:02:17.834 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:101
Monday 21 October 2024  18:23:23 -0400 (0:00:00.040)       0:02:17.874 ******** 
skipping: [managed-node2] => {}

TASK [Show volume thin pool size] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:105
Monday 21 October 2024  18:23:23 -0400 (0:00:00.048)       0:02:17.923 ******** 
skipping: [managed-node2] => {}

TASK [Show test volume size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:109
Monday 21 October 2024  18:23:24 -0400 (0:00:00.185)       0:02:18.108 ******** 
skipping: [managed-node2] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:113
Monday 21 October 2024  18:23:24 -0400 (0:00:00.044)       0:02:18.152 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:120
Monday 21 October 2024  18:23:24 -0400 (0:00:00.063)       0:02:18.216 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:127
Monday 21 October 2024  18:23:24 -0400 (0:00:00.037)       0:02:18.254 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:131
Monday 21 October 2024  18:23:24 -0400 (0:00:00.053)       0:02:18.308 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:137
Monday 21 October 2024  18:23:24 -0400 (0:00:00.067)       0:02:18.375 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:143
Monday 21 October 2024  18:23:24 -0400 (0:00:00.071)       0:02:18.447 ******** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:147
Monday 21 October 2024  18:23:24 -0400 (0:00:00.089)       0:02:18.537 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:151
Monday 21 October 2024  18:23:24 -0400 (0:00:00.084)       0:02:18.622 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Get information about the LV] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:5
Monday 21 October 2024  18:23:24 -0400 (0:00:00.127)       0:02:18.750 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.036787",
    "end": "2024-10-21 18:23:25.179694",
    "rc": 0,
    "start": "2024-10-21 18:23:25.142907"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
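
With --noheadings, --nameprefixes, and --unquoted, lvs emits shell-style KEY=VALUE pairs on a single line; the empty LVM2_CACHE_TOTAL_BLOCKS and the linear segment type confirm the LV is not cached. A rough sketch of parsing such output into a fact (the register name lv_info and the regex are illustrative, not the test's actual code):

    - name: Set LV segment type
      ansible.builtin.set_fact:
        storage_test_lv_segtype: "{{ lv_info.stdout | regex_findall('LVM2_SEGTYPE=(\\S+)') }}"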

TASK [Set LV segment type] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:13
Monday 21 October 2024  18:23:25 -0400 (0:00:00.593)       0:02:19.343 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:17
Monday 21 October 2024  18:23:25 -0400 (0:00:00.105)       0:02:19.449 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:24
Monday 21 October 2024  18:23:25 -0400 (0:00:00.099)       0:02:19.548 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:31
Monday 21 October 2024  18:23:25 -0400 (0:00:00.098)       0:02:19.647 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:37
Monday 21 October 2024  18:23:25 -0400 (0:00:00.064)       0:02:19.712 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:42
Monday 21 October 2024  18:23:25 -0400 (0:00:00.074)       0:02:19.787 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:25
Monday 21 October 2024  18:23:25 -0400 (0:00:00.103)       0:02:19.890 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:44
Monday 21 October 2024  18:23:25 -0400 (0:00:00.078)       0:02:19.969 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:54
Monday 21 October 2024  18:23:25 -0400 (0:00:00.038)       0:02:20.007 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:262
Monday 21 October 2024  18:23:25 -0400 (0:00:00.038)       0:02:20.045 ******** 

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:2
Monday 21 October 2024  18:23:26 -0400 (0:00:00.134)       0:02:20.179 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Ensure ansible_facts used by role] ****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 21 October 2024  18:23:26 -0400 (0:00:00.088)       0:02:20.268 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set platform/version specific variables] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 21 October 2024  18:23:26 -0400 (0:00:00.081)       0:02:20.349 ******** 
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node2] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=RedHat_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/vars/RedHat_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.yml"
}
skipping: [managed-node2] => (item=RedHat_9.5.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat_9.5.yml",
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if system is ostree] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 21 October 2024  18:23:26 -0400 (0:00:00.109)       0:02:20.459 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 21 October 2024  18:23:26 -0400 (0:00:00.052)       0:02:20.511 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:5
Monday 21 October 2024  18:23:26 -0400 (0:00:00.076)       0:02:20.587 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:9
Monday 21 October 2024  18:23:26 -0400 (0:00:00.061)       0:02:20.650 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Include the appropriate provider tasks] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main.yml:13
Monday 21 October 2024  18:23:26 -0400 (0:00:00.096)       0:02:20.747 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml for managed-node2

TASK [redhat.rhel_system_roles.storage : Make sure blivet is available] ********
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 21 October 2024  18:23:26 -0400 (0:00:00.163)       0:02:20.911 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Show storage_pools] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 21 October 2024  18:23:26 -0400 (0:00:00.078)       0:02:20.990 ******** 
ok: [managed-node2] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined. 'storage_pools' is undefined"
}

TASK [redhat.rhel_system_roles.storage : Show storage_volumes] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 21 October 2024  18:23:27 -0400 (0:00:00.080)       0:02:21.071 ******** 
ok: [managed-node2] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "state": "absent",
            "type": "disk"
        }
    ]
}
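
The storage_volumes value shown here matches the Clean up task at tests_luks_pool.yml:262: the test asks the role to remove the previously created disk volume. As a hedged sketch (the exact play wording is assumed; only the variable values come from this output), the invocation looks roughly like:

    - name: Clean up
      ansible.builtin.include_role:
        name: redhat.rhel_system_roles.storage
      vars:
        storage_volumes:
          - name: foo
            type: disk
            disks:
              - sda
            state: absent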

TASK [redhat.rhel_system_roles.storage : Get required packages] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 21 October 2024  18:23:27 -0400 (0:00:00.054)       0:02:21.125 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Enable copr repositories if needed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 21 October 2024  18:23:27 -0400 (0:00:00.074)       0:02:21.200 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Make sure required packages are installed] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 21 October 2024  18:23:27 -0400 (0:00:00.077)       0:02:21.278 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Get service facts] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 21 October 2024  18:23:27 -0400 (0:00:00.064)       0:02:21.342 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Set storage_cryptsetup_services] ******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 21 October 2024  18:23:27 -0400 (0:00:00.053)       0:02:21.395 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 21 October 2024  18:23:27 -0400 (0:00:00.107)       0:02:21.503 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 21 October 2024  18:23:27 -0400 (0:00:00.034)       0:02:21.538 ******** 
changed: [managed-node2] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "luks"
        }
    ],
    "changed": true,
    "crypts": [
        {
            "backing_device": "/dev/sda",
            "name": "luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "password": "-",
            "state": "absent"
        }
    ],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/xvda3",
        "/dev/xvda4"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test2",
            "src": "/dev/mapper/foo-test1",
            "state": "absent"
        }
    ],
    "packages": [
        "dosfstools",
        "xfsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "_mount_id": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "_raw_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks1",
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "lvmpv",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10729029632,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}
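
Note the teardown order in the actions list: blivet removes the stack from the top down (the xfs format on the LV, then the LV, the VG, the lvmpv format on the LUKS mapping, the mapping itself, and finally the LUKS format on /dev/sda). A hedged, illustrative task for confirming the LUKS header is gone afterwards (not part of this test run):

    - name: Confirm the LUKS header on sda is gone (illustrative only)
      ansible.builtin.command: cryptsetup isLuks /dev/sda
      register: luks_check
      changed_when: false
      failed_when: luks_check.rc == 0  # rc 0 means a LUKS header is still present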

TASK [redhat.rhel_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 21 October 2024  18:23:29 -0400 (0:00:02.155)       0:02:23.693 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [redhat.rhel_system_roles.storage : Check if /etc/fstab is present] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 21 October 2024  18:23:29 -0400 (0:00:00.070)       0:02:23.764 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549387.6862988,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "70d2ec29fab7d6430b316089b5a59848503fec51",
        "ctime": 1729549387.685299,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 822083726,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1729549387.685299,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1173,
        "uid": 0,
        "version": "1383220658",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [redhat.rhel_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 21 October 2024  18:23:30 -0400 (0:00:00.634)       0:02:24.398 ******** 
ok: [managed-node2] => {
    "backup": "",
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 21 October 2024  18:23:30 -0400 (0:00:00.537)       0:02:24.936 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Show blivet_output] *******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 21 October 2024  18:23:30 -0400 (0:00:00.044)       0:02:24.981 ******** 
ok: [managed-node2] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "luks"
            }
        ],
        "changed": true,
        "crypts": [
            {
                "backing_device": "/dev/sda",
                "name": "luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "password": "-",
                "state": "absent"
            }
        ],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/xvda3",
            "/dev/xvda4"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test2",
                "src": "/dev/mapper/foo-test1",
                "state": "absent"
            }
        ],
        "packages": [
            "dosfstools",
            "xfsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "_mount_id": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks1",
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "lvmpv",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10729029632,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [redhat.rhel_system_roles.storage : Set the list of pools for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 21 October 2024  18:23:30 -0400 (0:00:00.085)       0:02:25.067 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Set the list of volumes for test verification] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 21 October 2024  18:23:31 -0400 (0:00:00.050)       0:02:25.117 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "_mount_id": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks1",
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "lvmpv",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10729029632,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [redhat.rhel_system_roles.storage : Remove obsolete mounts] ***************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 21 October 2024  18:23:31 -0400 (0:00:00.076)       0:02:25.194 ******** 
changed: [managed-node2] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test2', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test2",
        "src": "/dev/mapper/foo-test1",
        "state": "absent"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 21 October 2024  18:23:31 -0400 (0:00:00.508)       0:02:25.702 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [redhat.rhel_system_roles.storage : Set up new/current mounts] ************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 21 October 2024  18:23:32 -0400 (0:00:00.812)       0:02:26.514 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Manage mount ownership/permissions] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 21 October 2024  18:23:32 -0400 (0:00:00.110)       0:02:26.625 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 21 October 2024  18:23:32 -0400 (0:00:00.103)       0:02:26.728 ******** 
ok: [managed-node2] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [redhat.rhel_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 21 October 2024  18:23:33 -0400 (0:00:00.771)       0:02:27.500 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549365.6252,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "cb36f850ef5d183530e7e4584114c5334485267a",
        "ctime": 1729549365.5961998,
        "dev": 51716,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 872415378,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0600",
        "mtime": 1729549365.5961998,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 53,
        "uid": 0,
        "version": "468128933",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [redhat.rhel_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 21 October 2024  18:23:33 -0400 (0:00:00.447)       0:02:27.948 ******** 
changed: [managed-node2] => (item={'backing_device': '/dev/sda', 'name': 'luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7', 'password': '-', 'state': 'absent'}) => {
    "ansible_loop_var": "entry",
    "backup": "",
    "changed": true,
    "entry": {
        "backing_device": "/dev/sda",
        "name": "luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
        "password": "-",
        "state": "absent"
    },
    "found": 1
}

MSG:

1 line(s) removed
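
The removed line was the /etc/crypttab entry for the now-absent LUKS device. Given the entry fields above (name, backing_device, password "-"), it would have read approximately:

    luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7 /dev/sda -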

TASK [redhat.rhel_system_roles.storage : Update facts] *************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 21 October 2024  18:23:34 -0400 (0:00:00.474)       0:02:28.423 ******** 
ok: [managed-node2]

TASK [Verify role results] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:272
Monday 21 October 2024  18:23:35 -0400 (0:00:00.942)       0:02:29.366 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml for managed-node2

TASK [Print out pool information] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:2
Monday 21 October 2024  18:23:35 -0400 (0:00:00.053)       0:02:29.419 ******** 
skipping: [managed-node2] => {}

TASK [Print out volume information] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:7
Monday 21 October 2024  18:23:35 -0400 (0:00:00.035)       0:02:29.454 ******** 
ok: [managed-node2] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "_mount_id": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7",
            "_raw_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks1",
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "lvmpv",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10729029632,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:15
Monday 21 October 2024  18:23:35 -0400 (0:00:00.040)       0:02:29.494 ******** 
ok: [managed-node2] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "vfat",
            "label": "",
            "mountpoint": "/boot/efi",
            "name": "/dev/xvda2",
            "size": "200M",
            "type": "partition",
            "uuid": "7B77-95E7"
        },
        "/dev/xvda3": {
            "fstype": "xfs",
            "label": "boot",
            "mountpoint": "/boot",
            "name": "/dev/xvda3",
            "size": "1G",
            "type": "partition",
            "uuid": "a8cc2a47-4cf2-4d6f-8916-f69641ec5919"
        },
        "/dev/xvda4": {
            "fstype": "xfs",
            "label": "root",
            "mountpoint": "/",
            "name": "/dev/xvda4",
            "size": "248.8G",
            "type": "partition",
            "uuid": "1b4086e3-4d44-4b6e-99dc-43b96b9fea96"
        }
    }
}
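
Consistent with the destroy actions earlier, /dev/sda now reports an empty fstype, label, and uuid. The same information could be gathered directly (hedged sketch using standard lsblk columns):

    - name: Inspect sda after cleanup (illustrative only)
      ansible.builtin.command: lsblk -o NAME,FSTYPE,UUID /dev/sda
      register: sda_info
      changed_when: false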

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:20
Monday 21 October 2024  18:23:35 -0400 (0:00:00.361)       0:02:29.856 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.004062",
    "end": "2024-10-21 18:23:37.093095",
    "rc": 0,
    "start": "2024-10-21 18:23:36.089033"
}

STDOUT:

UUID=1b4086e3-4d44-4b6e-99dc-43b96b9fea96	/	xfs	defaults	0	0
UUID=a8cc2a47-4cf2-4d6f-8916-f69641ec5919	/boot	xfs	defaults	0	0
UUID=7B77-95E7	/boot/efi	vfat	defaults,uid=0,gid=0,umask=077,shortname=winnt	0	2
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_engineering_nfs/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-bos-c01-eng01-nfs01b.storage.bos.redhat.com:/devops_brew_scratch_nfs/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
# system_role:storage
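
The trailing "# system_role:storage" line is the fingerprint comment the role adds to files it manages (written by the "Add fingerprint to /etc/fstab if present" task earlier in this run); the "Verify fingerprint" assertion later in this log checks for it. A hedged, illustrative check:

    - name: Check for the role fingerprint in fstab (illustrative only)
      ansible.builtin.command: grep -c '# system_role:storage' /etc/fstab
      changed_when: false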

TASK [Read the /etc/crypttab file] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:25
Monday 21 October 2024  18:23:37 -0400 (0:00:01.366)       0:02:31.223 ******** 
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003044",
    "end": "2024-10-21 18:23:37.458429",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-10-21 18:23:37.455385"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:34
Monday 21 October 2024  18:23:37 -0400 (0:00:00.362)       0:02:31.585 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:44
Monday 21 October 2024  18:23:37 -0400 (0:00:00.020)       0:02:31.606 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml for managed-node2 => (item={'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks1', 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'lvmpv', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'foo', 'raid_level': None, 'size': 10729029632, 'state': 'absent', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7', '_raw_device': '/dev/sda', '_mount_id': '/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7'})

TASK [Set storage volume test variables] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:2
Monday 21 October 2024  18:23:37 -0400 (0:00:00.058)       0:02:31.664 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:19
Monday 21 October 2024  18:23:37 -0400 (0:00:00.057)       0:02:31.722 ******** 
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node2 => (item=mount)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node2 => (item=fstab)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node2 => (item=fs)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml for managed-node2 => (item=device)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node2 => (item=encryption)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml for managed-node2 => (item=md)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml for managed-node2 => (item=size)
included: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node2 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:7
Monday 21 October 2024  18:23:37 -0400 (0:00:00.295)       0:02:32.018 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/luks-a1c15428-f81f-4ba5-b6d8-cb21453ee5a7"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:11
Monday 21 October 2024  18:23:38 -0400 (0:00:00.097)       0:02:32.116 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:19
Monday 21 October 2024  18:23:38 -0400 (0:00:00.114)       0:02:32.230 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:28
Monday 21 October 2024  18:23:38 -0400 (0:00:00.036)       0:02:32.267 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:36
Monday 21 October 2024  18:23:38 -0400 (0:00:00.033)       0:02:32.300 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:42
Monday 21 October 2024  18:23:38 -0400 (0:00:00.028)       0:02:32.329 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:48
Monday 21 October 2024  18:23:38 -0400 (0:00:00.031)       0:02:32.360 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:57
Monday 21 October 2024  18:23:38 -0400 (0:00:00.031)       0:02:32.392 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:63
Monday 21 October 2024  18:23:38 -0400 (0:00:00.027)       0:02:32.419 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:69
Monday 21 October 2024  18:23:38 -0400 (0:00:00.027)       0:02:32.447 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-mount.yml:79
Monday 21 October 2024  18:23:38 -0400 (0:00:00.022)       0:02:32.470 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Monday 21 October 2024  18:23:38 -0400 (0:00:00.032)       0:02:32.503 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Monday 21 October 2024  18:23:38 -0400 (0:00:00.088)       0:02:32.591 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Monday 21 October 2024  18:23:38 -0400 (0:00:00.041)       0:02:32.633 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Monday 21 October 2024  18:23:38 -0400 (0:00:00.051)       0:02:32.685 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Monday 21 October 2024  18:23:38 -0400 (0:00:00.055)       0:02:32.741 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Monday 21 October 2024  18:23:38 -0400 (0:00:00.034)       0:02:32.775 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:6
Monday 21 October 2024  18:23:38 -0400 (0:00:00.024)       0:02:32.800 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-fs.yml:14
Monday 21 October 2024  18:23:38 -0400 (0:00:00.024)       0:02:32.825 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:3
Monday 21 October 2024  18:23:38 -0400 (0:00:00.036)       0:02:32.862 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "atime": 1729549409.4503965,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1729549409.4503965,
        "dev": 5,
        "device_type": 2048,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 511,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1729549409.4503965,
        "nlink": 1,
        "path": "/dev/sda",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:9
Monday 21 October 2024  18:23:39 -0400 (0:00:00.372)       0:02:33.235 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:16
Monday 21 October 2024  18:23:39 -0400 (0:00:00.040)       0:02:33.275 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:23
Monday 21 October 2024  18:23:39 -0400 (0:00:00.034)       0:02:33.310 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:29
Monday 21 October 2024  18:23:39 -0400 (0:00:00.038)       0:02:33.349 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:33
Monday 21 October 2024  18:23:39 -0400 (0:00:00.042)       0:02:33.391 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-device.yml:38
Monday 21 October 2024  18:23:39 -0400 (0:00:00.037)       0:02:33.429 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Monday 21 October 2024  18:23:39 -0400 (0:00:00.037)       0:02:33.466 ******** 
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Monday 21 October 2024  18:23:39 -0400 (0:00:00.436)       0:02:33.902 ******** 
ok: [managed-node2] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Monday 21 October 2024  18:23:40 -0400 (0:00:00.864)       0:02:34.767 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Monday 21 October 2024  18:23:40 -0400 (0:00:00.027)       0:02:34.794 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Monday 21 October 2024  18:23:40 -0400 (0:00:00.043)       0:02:34.838 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Monday 21 October 2024  18:23:40 -0400 (0:00:00.022)       0:02:34.861 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Monday 21 October 2024  18:23:40 -0400 (0:00:00.022)       0:02:34.883 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Monday 21 October 2024  18:23:40 -0400 (0:00:00.022)       0:02:34.906 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Monday 21 October 2024  18:23:40 -0400 (0:00:00.021)       0:02:34.928 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Monday 21 October 2024  18:23:40 -0400 (0:00:00.021)       0:02:34.949 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Monday 21 October 2024  18:23:40 -0400 (0:00:00.021)       0:02:34.971 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Monday 21 October 2024  18:23:40 -0400 (0:00:00.063)       0:02:35.034 ******** 
ok: [managed-node2] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Monday 21 October 2024  18:23:41 -0400 (0:00:00.069)       0:02:35.103 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Monday 21 October 2024  18:23:41 -0400 (0:00:00.062)       0:02:35.166 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Monday 21 October 2024  18:23:41 -0400 (0:00:00.086)       0:02:35.252 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Monday 21 October 2024  18:23:41 -0400 (0:00:00.089)       0:02:35.342 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:8
Monday 21 October 2024  18:23:41 -0400 (0:00:00.037)       0:02:35.380 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:14
Monday 21 October 2024  18:23:41 -0400 (0:00:00.029)       0:02:35.409 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:19
Monday 21 October 2024  18:23:41 -0400 (0:00:00.029)       0:02:35.439 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:24
Monday 21 October 2024  18:23:41 -0400 (0:00:00.027)       0:02:35.466 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:29
Monday 21 October 2024  18:23:41 -0400 (0:00:00.035)       0:02:35.501 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:37
Monday 21 October 2024  18:23:41 -0400 (0:00:00.025)       0:02:35.527 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:46
Monday 21 October 2024  18:23:41 -0400 (0:00:00.024)       0:02:35.551 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:54
Monday 21 October 2024  18:23:41 -0400 (0:00:00.024)       0:02:35.576 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:62
Monday 21 October 2024  18:23:41 -0400 (0:00:00.023)       0:02:35.599 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-md.yml:70
Monday 21 October 2024  18:23:41 -0400 (0:00:00.022)       0:02:35.622 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:3
Monday 21 October 2024  18:23:41 -0400 (0:00:00.022)       0:02:35.644 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:11
Monday 21 October 2024  18:23:41 -0400 (0:00:00.033)       0:02:35.678 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:20
Monday 21 October 2024  18:23:41 -0400 (0:00:00.078)       0:02:35.756 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:28
Monday 21 October 2024  18:23:41 -0400 (0:00:00.035)       0:02:35.791 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:32
Monday 21 October 2024  18:23:41 -0400 (0:00:00.026)       0:02:35.818 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:46
Monday 21 October 2024  18:23:41 -0400 (0:00:00.034)       0:02:35.853 ******** 
skipping: [managed-node2] => {}

TASK [Show test blockinfo] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:50
Monday 21 October 2024  18:23:41 -0400 (0:00:00.036)       0:02:35.889 ******** 
skipping: [managed-node2] => {}

TASK [Show test pool size] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:54
Monday 21 October 2024  18:23:41 -0400 (0:00:00.035)       0:02:35.924 ******** 
skipping: [managed-node2] => {}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:58
Monday 21 October 2024  18:23:41 -0400 (0:00:00.042)       0:02:35.967 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:67
Monday 21 October 2024  18:23:41 -0400 (0:00:00.054)       0:02:36.021 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:71
Monday 21 October 2024  18:23:41 -0400 (0:00:00.039)       0:02:36.061 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:76
Monday 21 October 2024  18:23:42 -0400 (0:00:00.037)       0:02:36.098 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:82
Monday 21 October 2024  18:23:42 -0400 (0:00:00.036)       0:02:36.135 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:86
Monday 21 October 2024  18:23:42 -0400 (0:00:00.037)       0:02:36.173 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:91
Monday 21 October 2024  18:23:42 -0400 (0:00:00.042)       0:02:36.215 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:96
Monday 21 October 2024  18:23:42 -0400 (0:00:00.035)       0:02:36.251 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:101
Monday 21 October 2024  18:23:42 -0400 (0:00:00.036)       0:02:36.287 ******** 
skipping: [managed-node2] => {}

TASK [Show volume thin pool size] **********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:105
Monday 21 October 2024  18:23:42 -0400 (0:00:00.029)       0:02:36.316 ******** 
skipping: [managed-node2] => {}

TASK [Show test volume size] ***************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:109
Monday 21 October 2024  18:23:42 -0400 (0:00:00.031)       0:02:36.347 ******** 
skipping: [managed-node2] => {}

TASK [Establish base value for expected thin pool size] ************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:113
Monday 21 October 2024  18:23:42 -0400 (0:00:00.050)       0:02:36.398 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:120
Monday 21 October 2024  18:23:42 -0400 (0:00:00.029)       0:02:36.428 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:127
Monday 21 October 2024  18:23:42 -0400 (0:00:00.041)       0:02:36.469 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:131
Monday 21 October 2024  18:23:42 -0400 (0:00:00.049)       0:02:36.519 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:137
Monday 21 October 2024  18:23:42 -0400 (0:00:00.042)       0:02:36.561 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:143
Monday 21 October 2024  18:23:42 -0400 (0:00:00.040)       0:02:36.601 ******** 
ok: [managed-node2] => {
    "storage_test_actual_size": {
        "changed": false,
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:147
Monday 21 October 2024  18:23:42 -0400 (0:00:00.044)       0:02:36.646 ******** 
ok: [managed-node2] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-size.yml:151
Monday 21 October 2024  18:23:42 -0400 (0:00:00.048)       0:02:36.695 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:5
Monday 21 October 2024  18:23:42 -0400 (0:00:00.099)       0:02:36.794 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:13
Monday 21 October 2024  18:23:42 -0400 (0:00:00.041)       0:02:36.836 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:17
Monday 21 October 2024  18:23:42 -0400 (0:00:00.041)       0:02:36.878 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:24
Monday 21 October 2024  18:23:42 -0400 (0:00:00.051)       0:02:36.930 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:31
Monday 21 October 2024  18:23:42 -0400 (0:00:00.041)       0:02:36.971 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:37
Monday 21 October 2024  18:23:42 -0400 (0:00:00.038)       0:02:37.009 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-cache.yml:42
Monday 21 October 2024  18:23:42 -0400 (0:00:00.033)       0:02:37.043 ******** 
skipping: [managed-node2] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume.yml:25
Monday 21 October 2024  18:23:43 -0400 (0:00:00.035)       0:02:37.079 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:54
Monday 21 October 2024  18:23:43 -0400 (0:00:00.032)       0:02:37.111 ******** 
ok: [managed-node2] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

PLAY RECAP *********************************************************************
managed-node2              : ok=574  changed=19   unreachable=0    failed=0    skipped=558  rescued=6    ignored=0   

Monday 21 October 2024  18:23:43 -0400 (0:00:00.058)       0:02:37.170 ******** 
=============================================================================== 
redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state -- 10.30s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69 
redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state --- 8.60s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69 
redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.78s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69 
redhat.rhel_system_roles.storage : Get service facts -------------------- 2.20s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:51 
redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.16s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69 
redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.74s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69 
redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.50s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69 
Read the /etc/crypttab file --------------------------------------------- 1.42s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:25 
redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.39s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69 
Read the /etc/fstab file for volume existence --------------------------- 1.37s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/verify-role-results.yml:20 
Gathering Facts --------------------------------------------------------- 1.36s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:2 
redhat.rhel_system_roles.storage : Get required packages ---------------- 1.22s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19 
Write the key into the key file ----------------------------------------- 1.20s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/tests_luks_pool.yml:109 
redhat.rhel_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.11s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:69 
redhat.rhel_system_roles.storage : Update facts ------------------------- 1.07s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:222 
redhat.rhel_system_roles.storage : Make sure blivet is available -------- 1.06s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:2 
redhat.rhel_system_roles.storage : Get required packages ---------------- 1.04s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:19 
redhat.rhel_system_roles.storage : Tell systemd to refresh its view of /etc/fstab --- 1.03s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:159 
redhat.rhel_system_roles.storage : Update facts ------------------------- 1.00s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/roles/storage/tasks/main-blivet.yml:222 
Ensure cryptsetup is present -------------------------------------------- 0.99s
/usr/share/ansible/collections/ansible_collections/redhat/rhel_system_roles/tests/storage/test-verify-volume-encryption.yml:10