ansible-playbook [core 2.17.6]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-OFw
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.6 (main, Sep  9 2024, 00:00:00) [GCC 14.2.1 20240801 (Red Hat 14.2.1-1)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
statically imported: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/create-test-file.yml
statically imported: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-data-preservation.yml
statically imported: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/create-test-file.yml
statically imported: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-data-preservation.yml
statically imported: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/create-test-file.yml
statically imported: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-data-preservation.yml
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_luks_pool.yml **************************************************
1 plays in /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml

PLAY [Test LUKS pool] **********************************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:2
Wednesday 20 November 2024  12:05:04 -0500 (0:00:00.024)       0:00:00.024 **** 
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node3]

TASK [Enable FIPS mode] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:23
Wednesday 20 November 2024  12:05:05 -0500 (0:00:01.273)       0:00:01.297 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "lookup(\"env\", \"SYSTEM_ROLES_TEST_FIPS\") == \"true\"",
    "skip_reason": "Conditional result was False"
}

TASK [Reboot] ******************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:27
Wednesday 20 November 2024  12:05:05 -0500 (0:00:00.028)       0:00:01.325 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "lookup(\"env\", \"SYSTEM_ROLES_TEST_FIPS\") == \"true\"",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure dracut-fips] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:37
Wednesday 20 November 2024  12:05:05 -0500 (0:00:00.026)       0:00:01.352 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "lookup(\"env\", \"SYSTEM_ROLES_TEST_FIPS\") == \"true\"",
    "skip_reason": "Conditional result was False"
}

TASK [Configure boot for FIPS] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:43
Wednesday 20 November 2024  12:05:05 -0500 (0:00:00.024)       0:00:01.376 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "lookup(\"env\", \"SYSTEM_ROLES_TEST_FIPS\") == \"true\"",
    "skip_reason": "Conditional result was False"
}

TASK [Reboot] ******************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:52
Wednesday 20 November 2024  12:05:05 -0500 (0:00:00.025)       0:00:01.402 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "lookup(\"env\", \"SYSTEM_ROLES_TEST_FIPS\") == \"true\"",
    "skip_reason": "Conditional result was False"
}
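
For context, the five FIPS tasks above are all gated on the same environment
lookup; a minimal sketch of the pattern follows (the task body is an
assumption for illustration, only the when condition is taken verbatim from
the skip output):

    - name: Ensure dracut-fips
      # Hypothetical body; the log only shows that the task was skipped.
      ansible.builtin.package:
        name: dracut-fips
        state: present
      when: lookup("env", "SYSTEM_ROLES_TEST_FIPS") == "true"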

TASK [Run the role] ************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:56
Wednesday 20 November 2024  12:05:05 -0500 (0:00:00.027)       0:00:01.429 **** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 20 November 2024  12:05:05 -0500 (0:00:00.037)       0:00:01.467 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 20 November 2024  12:05:05 -0500 (0:00:00.034)       0:00:01.502 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}
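
The skip above means every fact the role requires was already cached by the
initial "Gathering Facts" task. A sketch of the guard pattern, assuming a
plain setup call (the gather_subset value is an assumption; the when
expression is verbatim from the log):

    - name: Ensure ansible_facts used by role
      ansible.builtin.setup:
        gather_subset: min  # assumed subset; not shown in the log
      when: __storage_required_facts | difference(ansible_facts.keys() | list) | length > 0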

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 20 November 2024  12:05:05 -0500 (0:00:00.061)       0:00:01.564 **** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
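
The loop above probes increasingly specific vars files and loads each one
that exists; RedHat.yml and CentOS.yml are absent, and CentOS_10.yml appears
twice because the major and major.minor candidates collapse to the same
filename on a release with no minor version. A sketch of the pattern
(candidate list and helper variable are assumptions; the when condition is
from the log):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"  # assumed layout
      loop:
        - "{{ ansible_facts['os_family'] }}.yml"
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
      when: __vars_file is file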

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 20 November 2024  12:05:06 -0500 (0:00:00.091)       0:00:01.655 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 20 November 2024  12:05:06 -0500 (0:00:00.577)       0:00:02.232 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__storage_is_ostree": false
    },
    "changed": false
}
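
The two tasks above stat a marker path and record the result as a boolean
fact; on this host the marker is absent, so __storage_is_ostree is false. A
sketch assuming the conventional /run/ostree-booted marker (the exact path
is not shown in the log):

    - name: Check if system is ostree
      ansible.builtin.stat:
        path: /run/ostree-booted  # assumed marker path
      register: __ostree_booted_stat

    - name: Set flag to indicate system is ostree
      ansible.builtin.set_fact:
        __storage_is_ostree: "{{ __ostree_booted_stat.stat.exists }}"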

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 20 November 2024  12:05:06 -0500 (0:00:00.023)       0:00:02.256 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 20 November 2024  12:05:06 -0500 (0:00:00.014)       0:00:02.271 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 20 November 2024  12:05:06 -0500 (0:00:00.016)       0:00:02.287 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 20 November 2024  12:05:06 -0500 (0:00:00.042)       0:00:02.329 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
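
"Nothing to do" here means every package in blivet_package_list (populated
from CentOS_10.yml above) was already installed. A sketch of the install
task, assuming the generic package module (the module choice is an
assumption; the list variable comes from the log):

    - name: Make sure blivet is available
      ansible.builtin.package:
        name: "{{ blivet_package_list }}"
        state: present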

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 20 November 2024  12:05:07 -0500 (0:00:00.911)       0:00:03.241 **** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 20 November 2024  12:05:07 -0500 (0:00:00.018)       0:00:03.259 **** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
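
The "VARIABLE IS NOT DEFINED!" strings above are not errors:
ansible.builtin.debug prints that marker when asked to display a variable
that has no value, and this play invokes the role without setting
storage_pools or storage_volumes. The two tasks reduce to:

    - name: Show storage_pools
      ansible.builtin.debug:
        var: storage_pools

    - name: Show storage_volumes
      ansible.builtin.debug:
        var: storage_volumes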

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 20 November 2024  12:05:07 -0500 (0:00:00.017)       0:00:03.277 **** 
[WARNING]: Module invocation had junk after the JSON data: sys:1:
DeprecationWarning: builtin type swigvarlink has no __module__ attribute
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}
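
This "Get required packages" step runs the role's blivet module in a
query-only mode so it can report any extra packages needed without changing
storage; with no pools or volumes requested, every result list is empty. A
sketch of the call (the packages_only parameter name is an assumption based
on the role's conventions; the pool/volume variables come from the log):

    - name: Get required packages
      fedora.linux_system_roles.blivet:
        pools: "{{ _storage_pools_list }}"
        volumes: "{{ _storage_volumes_list }}"
        packages_only: true  # assumed flag for the query-only mode
      register: __storage_package_info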

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Wednesday 20 November 2024  12:05:08 -0500 (0:00:00.693)       0:00:03.970 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Wednesday 20 November 2024  12:05:08 -0500 (0:00:00.048)       0:00:04.018 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Wednesday 20 November 2024  12:05:08 -0500 (0:00:00.047)       0:00:04.066 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "install_copr | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Wednesday 20 November 2024  12:05:08 -0500 (0:00:00.051)       0:00:04.117 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Wednesday 20 November 2024  12:05:08 -0500 (0:00:00.047)       0:00:04.165 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Wednesday 20 November 2024  12:05:09 -0500 (0:00:00.722)       0:00:04.888 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "capsule@.service": {
                "name": "capsule@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.fedoraproject.FirewallD1.service": {
                "name": "dbus-org.fedoraproject.FirewallD1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ebtables.service": {
                "name": "ebtables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fips-crypto-policy-overlay.service": {
                "name": "fips-crypto-policy-overlay.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ip6tables.service": {
                "name": "ip6tables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ipset.service": {
                "name": "ipset.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iptables.service": {
                "name": "iptables.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon-root.service": {
                "name": "quotaon-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "quotaon@.service": {
                "name": "quotaon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "raid-check.service": {
                "name": "raid-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bootctl@.service": {
                "name": "systemd-bootctl@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-creds@.service": {
                "name": "systemd-creds@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-clear.service": {
                "name": "systemd-hibernate-clear.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald-sync@.service": {
                "name": "systemd-journald-sync@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock@.service": {
                "name": "systemd-pcrlock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck-root.service": {
                "name": "systemd-quotacheck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-quotacheck@.service": {
                "name": "systemd-quotacheck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-load-credentials.service": {
                "name": "systemd-udev-load-credentials.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "tangd@.service": {
                "name": "tangd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}
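
The service scan above exists so the role can locate systemd cryptsetup units (typically systemd-cryptsetup@*.service) and mask them while it manipulates block devices; no such units are present here, so the fact set in the next task ends up an empty list. A minimal sketch of the masking step, assuming the unit names were collected into storage_cryptsetup_services; an illustration, not the role's exact task:

    # Sketch: mask each collected cryptsetup unit so systemd does not react
    # while blivet reshapes the underlying devices.
    - name: Mask the systemd cryptsetup services
      ansible.builtin.systemd_service:
        name: "{{ item }}"
        masked: true
      loop: "{{ storage_cryptsetup_services }}"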

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Wednesday 20 November 2024  12:05:11 -0500 (0:00:02.167)       0:00:07.056 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Wednesday 20 November 2024  12:05:11 -0500 (0:00:00.050)       0:00:07.106 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Wednesday 20 November 2024  12:05:11 -0500 (0:00:00.020)       0:00:07.126 **** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.558)       0:00:07.685 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.055)       0:00:07.740 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122150.8614388,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "00c426ef685ab5a6f24b66e9359d8b6b49b16c5e",
        "ctime": 1732122146.6324384,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 297795793,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1732122146.6324384,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "2100948536",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
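
The stat of /etc/fstab feeds the next task: the role only stamps its fingerprint into the file when the blivet run actually changed something and the file exists. Since blivet_output is unchanged here, the task below is skipped. A sketch of that conditional, where the register name __storage_fstab_stat and the "# system_role:storage" marker text are assumptions:

    - name: Add fingerprint to /etc/fstab if present
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"  # assumed marker text
        insertbefore: BOF
      when:
        - blivet_output is changed          # the condition shown below
        - __storage_fstab_stat.stat.exists  # hypothetical register name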

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.460)       0:00:08.200 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.038)       0:00:08.239 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.028)       0:00:08.267 **** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.034)       0:00:08.302 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.034)       0:00:08.336 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.034)       0:00:08.371 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.057)       0:00:08.429 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.030)       0:00:08.459 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.059)       0:00:08.519 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Wednesday 20 November 2024  12:05:12 -0500 (0:00:00.059)       0:00:08.579 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Wednesday 20 November 2024  12:05:13 -0500 (0:00:00.029)       0:00:08.608 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732121900.797422,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1732121897.3264217,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 494928085,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1732121897.3276753,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1390319532",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
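
The checksum da39a3ee5e6b4b0d3255bfef95601890afd80709 is the SHA-1 of an empty file (note size 0), so /etc/crypttab has no entries yet and the next task has nothing to manage. When blivet does report crypt entries (see the crypts list in the successful run further below), each carries name, backing_device, password, and state. A per-entry bookkeeping sketch using community.general.crypttab, which is an assumption and not necessarily the role's own implementation:

    - name: Manage /etc/crypttab to account for changes
      community.general.crypttab:  # assumed module choice
        name: "{{ entry.name }}"
        backing_device: "{{ entry.backing_device }}"
        password: "{{ entry.password }}"
        state: "{{ entry.state }}"
      loop: "{{ blivet_output.crypts }}"
      loop_control:
        loop_var: entry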

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Wednesday 20 November 2024  12:05:13 -0500 (0:00:00.417)       0:00:09.026 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Wednesday 20 November 2024  12:05:13 -0500 (0:00:00.039)       0:00:09.065 **** 
ok: [managed-node3]

TASK [Mark tasks to be skipped] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:60
Wednesday 20 November 2024  12:05:14 -0500 (0:00:00.984)       0:00:10.050 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "service_facts"
        ]
    },
    "changed": false
}
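
storage_skip_checks lets the test skip role self-checks it has already exercised once (blivet availability and the slow service_facts scan above). The task reduces to a plain set_fact mirroring the output shown:

    - name: Mark tasks to be skipped
      ansible.builtin.set_fact:
        storage_skip_checks:
          - blivet_available
          - service_facts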

TASK [Get unused disks] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:66
Wednesday 20 November 2024  12:05:14 -0500 (0:00:00.033)       0:00:10.083 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node3

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
Wednesday 20 November 2024  12:05:14 -0500 (0:00:00.054)       0:00:10.138 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
Wednesday 20 November 2024  12:05:15 -0500 (0:00:00.782)       0:00:10.921 **** 
ok: [managed-node3] => {
    "changed": false,
    "disks": [
        "sda"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"xfs\" LOG-SEC=\"512\"",
        "filename [xvda2] is a partition",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}
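
The helper parses lsblk output, rejects partitions (xvda1, xvda2) and any disk that carries partitions (/dev/xvda), and returns only as many empty disks as requested, which is one here since disks_needed defaults to 1 in the check below. Roughly the same raw data can be gathered with a plain command task; this sketch stands in for the test's find_unused_disk module without replacing its filtering logic:

    - name: List block devices in the form the helper parses
      ansible.builtin.command:
        cmd: lsblk -b -l -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
      register: lsblk_out
      changed_when: false  # read-only query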

TASK [Debug why there are no unused disks] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20
Wednesday 20 November 2024  12:05:15 -0500 (0:00:00.507)       0:00:11.429 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'Unable to find unused disk' in unused_disks_return.disks",
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29
Wednesday 20 November 2024  12:05:15 -0500 (0:00:00.031)       0:00:11.460 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "unused_disks": [
            "sda"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there's not enough unused disks in the system] ********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34
Wednesday 20 November 2024  12:05:15 -0500 (0:00:00.036)       0:00:11.496 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)",
    "skip_reason": "Conditional result was False"
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39
Wednesday 20 November 2024  12:05:15 -0500 (0:00:00.060)       0:00:11.557 **** 
ok: [managed-node3] => {
    "unused_disks": [
        "sda"
    ]
}

TASK [Test for correct handling of new encrypted pool w/ no key] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:76
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.035)       0:00:11.593 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml for managed-node3

TASK [Store global variable value copy] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:4
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.051)       0:00:11.644 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_pools_global": [],
        "storage_safe_mode_global": true,
        "storage_volumes_global": []
    },
    "changed": false
}
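
verify-role-failed snapshots the global pool/volume variables, runs the role with a deliberately broken configuration, and asserts on the captured failure. A sketch of the pattern, assuming the failure is caught with a block/rescue pair; task names are illustrative:

    - name: Run the role and expect it to fail
      block:
        - name: Apply an encrypted pool with no key
          ansible.builtin.include_role:
            name: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                type: lvm
                disks: [sda]
                encryption: true
                volumes:
                  - name: test1
                    size: 4g
                    mount_point: /opt/test1
        - name: Fail the test if the role did not fail
          ansible.builtin.fail:
            msg: Role completed without the expected error
      rescue:
        - name: Check that we failed in the role
          ansible.builtin.assert:
            that:
              - "'missing key/password' in ansible_failed_result.msg"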

TASK [Verify role raises correct error] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:10
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.063)       0:00:11.707 **** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.053)       0:00:11.761 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.045)       0:00:11.807 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.064)       0:00:11.871 **** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.081)       0:00:11.952 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.044)       0:00:11.997 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.112)       0:00:12.110 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.059)       0:00:12.169 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.032)       0:00:12.202 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.092)       0:00:12.294 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.045)       0:00:12.340 **** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.036)       0:00:12.376 **** 
ok: [managed-node3] => {
    "storage_volumes": []
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 20 November 2024  12:05:16 -0500 (0:00:00.034)       0:00:12.411 **** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [
        "cryptsetup",
        "lvm2"
    ],
    "pools": [],
    "volumes": []
}
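
This first blivet call only computes the package set for the requested layout, which is why it reports cryptsetup and lvm2 with no actions taken. A sketch of the invocation, assuming the collection exposes the module as fedora.linux_system_roles.blivet with the packages_only flag visible in the module_args dump further below:

    - name: Get required packages
      fedora.linux_system_roles.blivet:  # module name within the collection is an assumption
        pools: "{{ storage_pools }}"
        volumes: "{{ storage_volumes }}"
        packages_only: true
      register: package_info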

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Wednesday 20 November 2024  12:05:18 -0500 (0:00:01.643)       0:00:14.055 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Wednesday 20 November 2024  12:05:18 -0500 (0:00:00.039)       0:00:14.094 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Wednesday 20 November 2024  12:05:18 -0500 (0:00:00.037)       0:00:14.131 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "install_copr | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Wednesday 20 November 2024  12:05:18 -0500 (0:00:00.045)       0:00:14.177 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Wednesday 20 November 2024  12:05:18 -0500 (0:00:00.055)       0:00:14.232 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Wednesday 20 November 2024  12:05:19 -0500 (0:00:00.801)       0:00:15.033 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Wednesday 20 November 2024  12:05:19 -0500 (0:00:00.046)       0:00:15.080 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Wednesday 20 November 2024  12:05:19 -0500 (0:00:00.089)       0:00:15.170 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Wednesday 20 November 2024  12:05:19 -0500 (0:00:00.027)       0:00:15.197 **** 
fatal: [managed-node3]: FAILED! => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

MSG:

encrypted pool 'foo' missing key/password

TASK [fedora.linux_system_roles.storage : Failed message] **********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:109
Wednesday 20 November 2024  12:05:21 -0500 (0:00:01.637)       0:00:16.835 **** 
fatal: [managed-node3]: FAILED! => {
    "changed": false
}

MSG:

{'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'msg': "encrypted pool 'foo' missing key/password", 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': None, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': None, 'fs_label': None, 'fs_type': None, 'mount_options': None, 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': None, 'cached': None, 'cache_devices': [], 'cache_mode': None, 'cache_size': None, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'grow_to_fill': False, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': True, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False}
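
The module_args dump pinpoints the root cause: the pool has encryption: True while both encryption_key and encryption_password are None, so the module fails before performing any action (the actions list is empty). Supplying either credential clears the failure. A corrected pool spec might look like the following, where vault_luks_password is a hypothetical variable:

    storage_pools:
      - name: foo
        type: lvm
        disks: [sda]
        encryption: true
        encryption_password: "{{ vault_luks_password }}"  # or: encryption_key: /path/to/keyfile
        volumes:
          - name: test1
            size: 4g
            mount_point: /opt/test1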

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Wednesday 20 November 2024  12:05:21 -0500 (0:00:00.041)       0:00:16.877 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that we failed in the role] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:23
Wednesday 20 November 2024  12:05:21 -0500 (0:00:00.027)       0:00:16.904 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the blivet output and error message are correct] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:28
Wednesday 20 November 2024  12:05:21 -0500 (0:00:00.035)       0:00:16.940 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify correct exception or error message] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:39
Wednesday 20 November 2024  12:05:21 -0500 (0:00:00.045)       0:00:16.985 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_failed_exception is defined",
    "skip_reason": "Conditional result was False"
}

TASK [Mark tasks to be skipped] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:93
Wednesday 20 November 2024  12:05:21 -0500 (0:00:00.032)       0:00:17.018 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "packages_installed",
            "service_facts"
        ]
    },
    "changed": false
}

TASK [Create a key file] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:102
Wednesday 20 November 2024  12:05:21 -0500 (0:00:00.060)       0:00:17.079 **** 
ok: [managed-node3] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0600",
    "owner": "root",
    "path": "/tmp/storage_test7rjvd28_lukskey",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 0,
    "state": "file",
    "uid": 0
}

TASK [Write the key into the key file] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:109
Wednesday 20 November 2024  12:05:22 -0500 (0:00:00.565)       0:00:17.644 **** 
ok: [managed-node3] => {
    "changed": false,
    "checksum": "7a4dff3752e2baf5617c57eaac048e2b95e8af91",
    "dest": "/tmp/storage_test7rjvd28_lukskey",
    "gid": 0,
    "group": "root",
    "md5sum": "4ac07b967150835c00d0865161e48744",
    "mode": "0600",
    "owner": "root",
    "secontext": "unconfined_u:object_r:user_tmp_t:s0",
    "size": 32,
    "src": "/root/.ansible/tmp/ansible-tmp-1732122322.1287634-189982-186749156241923/.source",
    "state": "file",
    "uid": 0
}
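
The two tasks above create an empty mode-0600 temp file and then write a 32-byte key into it. An equivalent pair of tasks, where the tempfile prefix/suffix and the luks_key_value variable are assumptions:

    - name: Create a key file
      ansible.builtin.tempfile:
        state: file
        prefix: storage_test  # assumed; consistent with the path shown above
        suffix: _lukskey
      register: storage_test_key_file

    - name: Write the key into the key file
      ansible.builtin.copy:
        content: "{{ luks_key_value }}"  # hypothetical variable holding the 32-byte key
        dest: "{{ storage_test_key_file.path }}"
        mode: "0600"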

TASK [Create an encrypted lvm pool using a key file] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:116
Wednesday 20 November 2024  12:05:22 -0500 (0:00:00.926)       0:00:18.571 **** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.049)       0:00:18.620 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.044)       0:00:18.664 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.059)       0:00:18.724 **** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.082)       0:00:18.807 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.033)       0:00:18.841 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.035)       0:00:18.876 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.031)       0:00:18.907 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.029)       0:00:18.937 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.074)       0:00:19.012 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.025)       0:00:19.037 **** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_key": "/tmp/storage_test7rjvd28_lukskey",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
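
This is the corrected rerun of the invocation that failed above: the same pool, now with encryption_key pointing at the key file just written. At the play level it reduces to the following sketch, built from the values shown:

    - name: Create an encrypted lvm pool using a key file
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks: [sda]
            encryption: true
            encryption_key: /tmp/storage_test7rjvd28_lukskey
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1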

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.025)       0:00:19.063 **** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
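
The "VARIABLE IS NOT DEFINED!" output is expected here: this play sets only storage_pools, and the debug task prints the raw variable while the role itself falls back to an empty list. A debug with an explicit default avoids the alarming output; a sketch:

    - name: Show storage_volumes
      ansible.builtin.debug:
        msg: "{{ storage_volumes | d([]) }}"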

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.024)       0:00:19.088 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.025)       0:00:19.114 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.025)       0:00:19.139 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.024)       0:00:19.164 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.022)       0:00:19.186 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.049)       0:00:19.235 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Wednesday 20 November 2024  12:05:23 -0500 (0:00:00.015)       0:00:19.251 **** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "luks"
        },
        {
            "action": "create device",
            "device": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [
        {
            "backing_device": "/dev/sda",
            "name": "luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
            "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "state": "present"
        }
    ],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
            "state": "mounted"
        }
    ],
    "packages": [
        "cryptsetup",
        "xfsprogs",
        "lvm2"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
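
The action list above is the whole build, in order: a LUKS container formatted on /dev/sda, the opened /dev/mapper/luks-7257ace9-... mapping, an LVM PV on that mapping, the volume group foo, the logical volume test1, and an xfs filesystem on it. A sketch of the kind of play that produces this result, reconstructed from the pool dump in the output; the key material is masked by no_log, so `luks_keyfile` is a hypothetical stand-in:

    - hosts: managed-node3
      roles:
        - fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            encryption: true
            encryption_key: "{{ luks_keyfile }}"  # hypothetical; the real value is masked above
            volumes:
              - name: test1
                size: 4g
                fs_type: xfs
                mount_point: /opt/test1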

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Wednesday 20 November 2024  12:05:38 -0500 (0:00:14.984)       0:00:34.235 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Wednesday 20 November 2024  12:05:38 -0500 (0:00:00.087)       0:00:34.323 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122150.8614388,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "00c426ef685ab5a6f24b66e9359d8b6b49b16c5e",
        "ctime": 1732122146.6324384,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 297795793,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1732122146.6324384,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "2100948536",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Wednesday 20 November 2024  12:05:39 -0500 (0:00:00.476)       0:00:34.799 **** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}
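
This task stamps /etc/fstab with a role marker, visible later in this log as the `# system_role:storage` line at the top of the fstab dump; it reports `changed: false` here because the marker is already in place. A standalone equivalent, assuming lineinfile semantics rather than the role's exact implementation:

    - name: Stamp /etc/fstab with the role fingerprint (sketch)
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"
        insertbefore: BOF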

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Wednesday 20 November 2024  12:05:39 -0500 (0:00:00.692)       0:00:35.492 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Wednesday 20 November 2024  12:05:39 -0500 (0:00:00.032)       0:00:35.524 **** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "luks"
            },
            {
                "action": "create device",
                "device": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [
            {
                "backing_device": "/dev/sda",
                "name": "luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
                "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "state": "present"
            }
        ],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
                "state": "mounted"
            }
        ],
        "packages": [
            "cryptsetup",
            "xfsprogs",
            "lvm2"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Wednesday 20 November 2024  12:05:39 -0500 (0:00:00.044)       0:00:35.569 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Wednesday 20 November 2024  12:05:40 -0500 (0:00:00.039)       0:00:35.608 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Wednesday 20 November 2024  12:05:40 -0500 (0:00:00.035)       0:00:35.644 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Wednesday 20 November 2024  12:05:40 -0500 (0:00:00.062)       0:00:35.707 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
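
Since /etc/fstab was just rewritten, the role asks systemd to regenerate the mount units it derives from that file. A standalone equivalent could be:

    - name: Refresh systemd's view of /etc/fstab (sketch)
      ansible.builtin.systemd_service:
        daemon_reload: true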

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Wednesday 20 November 2024  12:05:41 -0500 (0:00:01.277)       0:00:36.985 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=eb458450-3386-4608-bacc-da889e315bf0', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=eb458450-3386-4608-bacc-da889e315bf0"
}
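
The loop item above maps one-to-one onto `ansible.posix.mount`; the two `redirecting` lines show the role calling `ansible.builtin.mount` and Ansible resolving it to the posix collection. The same mount as a standalone task:

    - name: Mount the new LV by UUID (sketch of the loop item above)
      ansible.posix.mount:
        src: UUID=eb458450-3386-4608-bacc-da889e315bf0
        path: /opt/test1
        fstype: xfs
        opts: defaults
        state: mounted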

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Wednesday 20 November 2024  12:05:42 -0500 (0:00:00.669)       0:00:37.654 **** 
skipping: [managed-node3] => (item={'src': 'UUID=eb458450-3386-4608-bacc-da889e315bf0', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Wednesday 20 November 2024  12:05:42 -0500 (0:00:00.075)       0:00:37.730 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Wednesday 20 November 2024  12:05:42 -0500 (0:00:00.795)       0:00:38.526 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732121900.797422,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1732121897.3264217,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 494928085,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1732121897.3276753,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1390319532",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Wednesday 20 November 2024  12:05:43 -0500 (0:00:00.459)       0:00:38.985 **** 
changed: [managed-node3] => (item={'backing_device': '/dev/sda', 'name': 'luks-7257ace9-e57f-4dfc-9a4b-84a32979e624', 'password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'state': 'present'}) => {
    "ansible_loop_var": "entry",
    "backup": "",
    "changed": true,
    "entry": {
        "backing_device": "/dev/sda",
        "name": "luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
        "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
        "state": "present"
    }
}

MSG:

line added
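
The entry written here follows the crypttab(5) layout of name, backing device, and key; the key field is masked because it is a no_log value. The role edits /etc/crypttab itself; a rough standalone equivalent, offered as an assumption rather than the role's actual method, is `community.general.crypttab`:

    - name: Add the LUKS mapping to /etc/crypttab (sketch)
      community.general.crypttab:
        name: luks-7257ace9-e57f-4dfc-9a4b-84a32979e624
        backing_device: /dev/sda
        password: /root/luks-keyfile  # hypothetical path; the real value is masked above
        state: present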

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Wednesday 20 November 2024  12:05:43 -0500 (0:00:00.480)       0:00:39.466 **** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:131
Wednesday 20 November 2024  12:05:44 -0500 (0:00:00.980)       0:00:40.446 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Wednesday 20 November 2024  12:05:44 -0500 (0:00:00.035)       0:00:40.482 **** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Wednesday 20 November 2024  12:05:44 -0500 (0:00:00.055)       0:00:40.537 **** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Wednesday 20 November 2024  12:05:44 -0500 (0:00:00.035)       0:00:40.573 **** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "eb458450-3386-4608-bacc-da889e315bf0"
        },
        "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
            "size": "10G",
            "type": "crypt",
            "uuid": "X5NbNy-mflB-wYEn-wybt-H9mb-IrFj-H53Iyj"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "7257ace9-e57f-4dfc-9a4b-84a32979e624"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "63814bf2-dbd4-439c-b63b-6d05ca07d081"
        }
    }
}
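
The info dump confirms the intended stack: /dev/sda carries the crypto_LUKS container, the opened luks-7257ace9-... mapping is the LVM2_member PV, and foo-test1 is the 4G xfs volume mounted at /opt/test1. A quick way to see the same picture on the target, as a sketch:

    - name: Inspect the resulting device stack (sketch)
      ansible.builtin.command:
        cmd: lsblk -o NAME,TYPE,FSTYPE,MOUNTPOINT /dev/sda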

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Wednesday 20 November 2024  12:05:45 -0500 (0:00:00.483)       0:00:41.057 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003041",
    "end": "2024-11-20 12:05:45.941587",
    "rc": 0,
    "start": "2024-11-20 12:05:45.938546"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Nov 12 09:07:12 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=63814bf2-dbd4-439c-b63b-6d05ca07d081 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=eb458450-3386-4608-bacc-da889e315bf0 /opt/test1 xfs defaults 0 0
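
The dump shows both the role fingerprint at the top and the new /opt/test1 line at the bottom. The verification playbook registers output like this and asserts on it; a sketch, assuming the `cat` result were registered as `storage_test_fstab`:

    - name: Assert the new mount line is present in fstab (sketch; names assumed)
      ansible.builtin.assert:
        that:
          - "'/opt/test1' in storage_test_fstab.stdout"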

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Wednesday 20 November 2024  12:05:46 -0500 (0:00:00.549)       0:00:41.606 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002956",
    "end": "2024-11-20 12:05:46.355684",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-20 12:05:46.352728"
}

STDOUT:

luks-7257ace9-e57f-4dfc-9a4b-84a32979e624 /dev/sda VALUE_SPECIFIED_IN_NO_LOG_PARAMETER
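
The third field prints as VALUE_SPECIFIED_IN_NO_LOG_PARAMETER only because Ansible censors no_log values wherever they show up in task output; the file on disk contains the real key reference.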

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Wednesday 20 November 2024  12:05:46 -0500 (0:00:00.402)       0:00:42.009 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=eb458450-3386-4608-bacc-da889e315bf0', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Wednesday 20 November 2024  12:05:46 -0500 (0:00:00.096)       0:00:42.105 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Wednesday 20 November 2024  12:05:46 -0500 (0:00:00.033)       0:00:42.139 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.025838",
    "end": "2024-11-20 12:05:46.925114",
    "rc": 0,
    "start": "2024-11-20 12:05:46.899276"
}

STDOUT:

        0
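
`vgs --binary -o shared` prints 1 or 0 for the VG's shared flag, so the 0 here matches `shared: false` in the pool spec; the next task asserts exactly that. A sketch of the comparison, assuming the command result were registered as `vgs_shared`:

    - name: Check the VG shared flag against the spec (sketch; names assumed)
      ansible.builtin.assert:
        that:
          - (vgs_shared.stdout | trim) == ('1' if storage_test_pool.shared else '0')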

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Wednesday 20 November 2024  12:05:46 -0500 (0:00:00.427)       0:00:42.567 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Wednesday 20 November 2024  12:05:47 -0500 (0:00:00.040)       0:00:42.608 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Wednesday 20 November 2024  12:05:47 -0500 (0:00:00.048)       0:00:42.656 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Wednesday 20 November 2024  12:05:47 -0500 (0:00:00.050)       0:00:42.707 **** 
ok: [managed-node3] => (item=/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
    "pv": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Wednesday 20 November 2024  12:05:47 -0500 (0:00:00.495)       0:00:43.203 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Wednesday 20 November 2024  12:05:47 -0500 (0:00:00.084)       0:00:43.288 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Wednesday 20 November 2024  12:05:47 -0500 (0:00:00.112)       0:00:43.401 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Wednesday 20 November 2024  12:05:47 -0500 (0:00:00.072)       0:00:43.473 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "crypt"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Wednesday 20 November 2024  12:05:47 -0500 (0:00:00.037)       0:00:43.510 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Wednesday 20 November 2024  12:05:47 -0500 (0:00:00.048)       0:00:43.558 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.032)       0:00:43.590 **** 
ok: [managed-node3] => (item=/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.046)       0:00:43.637 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True
sys:1: DeprecationWarning: builtin type swigvarlink has no __module__ attribute

TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.389)       0:00:44.026 **** 
skipping: [managed-node3] => (item=/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "grow_supported.stdout | trim == 'True'",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.058)       0:00:44.085 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.076)       0:00:44.161 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.031)       0:00:44.192 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.036)       0:00:44.229 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.042)       0:00:44.271 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.033)       0:00:44.304 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.037)       0:00:44.342 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.031)       0:00:44.373 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.039)       0:00:44.413 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.030)       0:00:44.444 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.031)       0:00:44.476 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.030)       0:00:44.506 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Wednesday 20 November 2024  12:05:48 -0500 (0:00:00.034)       0:00:44.541 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.069)       0:00:44.610 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=eb458450-3386-4608-bacc-da889e315bf0', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.066)       0:00:44.677 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.040)       0:00:44.718 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.040)       0:00:44.759 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.037)       0:00:44.796 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.036)       0:00:44.833 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.035)       0:00:44.869 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.039)       0:00:44.908 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.035)       0:00:44.944 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.068)       0:00:45.013 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=eb458450-3386-4608-bacc-da889e315bf0', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.060)       0:00:45.073 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.063)       0:00:45.137 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.030)       0:00:45.168 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.029)       0:00:45.197 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.032)       0:00:45.229 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.074)       0:00:45.304 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.063)       0:00:45.368 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml for managed-node3 => (item=/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624)

TASK [Get the backing device path] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:2
Wednesday 20 November 2024  12:05:49 -0500 (0:00:00.060)       0:00:45.428 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/disk/by-uuid/7257ace9-e57f-4dfc-9a4b-84a32979e624"
    ],
    "delta": "0:00:00.003077",
    "end": "2024-11-20 12:05:50.191620",
    "rc": 0,
    "start": "2024-11-20 12:05:50.188543"
}

STDOUT:

/dev/sda
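
The mapper name luks-<uuid> embeds the LUKS header UUID, so the backing
device can be recovered through the /dev/disk/by-uuid symlink. A minimal
shell sketch of the same lookup (UUID taken from this run; substitute your
own):

    uuid=7257ace9-e57f-4dfc-9a4b-84a32979e624
    # Resolve the by-uuid symlink to the underlying block device
    realpath "/dev/disk/by-uuid/${uuid}"   # prints /dev/sda on this host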

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:12
Wednesday 20 November 2024  12:05:50 -0500 (0:00:00.425)       0:00:45.854 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this member] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:18
Wednesday 20 November 2024  12:05:51 -0500 (0:00:00.758)       0:00:46.612 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cryptsetup",
        "luksDump",
        "/dev/sda"
    ],
    "delta": "0:00:00.006842",
    "end": "2024-11-20 12:05:51.416737",
    "rc": 0,
    "start": "2024-11-20 12:05:51.409895"
}

STDOUT:

LUKS header information
Version:       	2
Epoch:         	3
Metadata area: 	16384 [bytes]
Keyslots area: 	16744448 [bytes]
UUID:          	7257ace9-e57f-4dfc-9a4b-84a32979e624
Label:         	(no label)
Subsystem:     	(no subsystem)
Flags:       	(no flags)

Data segments:
  0: crypt
	offset: 16777216 [bytes]
	length: (whole device)
	cipher: aes-xts-plain64
	sector: 512 [bytes]

Keyslots:
  0: luks2
	Key:        512 bits
	Priority:   normal
	Cipher:     aes-xts-plain64
	Cipher key: 512 bits
	PBKDF:      argon2id
	Time cost:  4
	Memory:     701963
	Threads:    2
	Salt:       db 1b 2d 38 cb df 49 28 a0 06 1a 1e 16 4f 2c 29 
	            e4 ec 24 ff 25 84 a6 a0 07 57 50 f5 c3 e5 87 87 
	AF stripes: 4000
	AF hash:    sha256
	Area offset:32768 [bytes]
	Area length:258048 [bytes]
	Digest ID:  0
Tokens:
Digests:
  0: pbkdf2
	Hash:       sha256
	Iterations: 105703
	Salt:       02 ed d1 30 2e a9 c7 57 e9 61 6b bb ae 38 c6 e7 
	            f4 c8 1f ac e5 82 97 26 0d d4 b9 81 75 d4 bc 18 
	Digest:     09 ff 58 36 8e 28 80 de 44 44 7d 4f 92 5b fa 9a 
	            f8 ab 59 96 37 19 7b 24 08 d0 9e 09 2c 07 9a dc 
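
The LUKS version, key-size, and cipher checks that follow are skipped in
this run because the pool does not pin those settings; when they do run,
they match fields out of this dump. A hedged shell sketch of the same field
extraction (field labels as printed by cryptsetup 2.x):

    dump=$(cryptsetup luksDump /dev/sda)
    version=$(awk '/^Version:/ {print $2}' <<<"$dump")         # 2
    cipher=$(awk '/^\tcipher:/ {print $2; exit}' <<<"$dump")   # aes-xts-plain64
    keybits=$(awk '/^\tKey:/ {print $2; exit}' <<<"$dump")     # 512
    [ "$cipher" = aes-xts-plain64 ] && echo "cipher as expected"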

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:26
Wednesday 20 November 2024  12:05:51 -0500 (0:00:00.457)       0:00:47.070 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption_luks_version",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:38
Wednesday 20 November 2024  12:05:51 -0500 (0:00:00.032)       0:00:47.103 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption_key_size",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:50
Wednesday 20 November 2024  12:05:51 -0500 (0:00:00.031)       0:00:47.134 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption_cipher",
    "skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Wednesday 20 November 2024  12:05:51 -0500 (0:00:00.030)       0:00:47.165 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node3 => (item=/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Wednesday 20 November 2024  12:05:51 -0500 (0:00:00.071)       0:00:47.236 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [
            "luks-7257ace9-e57f-4dfc-9a4b-84a32979e624 /dev/sda VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
        ]
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Wednesday 20 November 2024  12:05:51 -0500 (0:00:00.101)       0:00:47.338 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
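
The assertion checks that exactly one /etc/crypttab line exists for this
member, in the usual <name> <device> <keyfile> form (the key file is masked
by no_log here). An equivalent one-off check in shell, assuming the mapping
name from this run:

    name=luks-7257ace9-e57f-4dfc-9a4b-84a32979e624
    # Expect exactly one crypttab line whose first field is the mapping name
    count=$(awk -v n="$name" '$1 == n' /etc/crypttab | wc -l)
    [ "$count" -eq 1 ] && echo "crypttab entry present"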

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Wednesday 20 November 2024  12:05:51 -0500 (0:00:00.099)       0:00:47.438 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Wednesday 20 November 2024  12:05:51 -0500 (0:00:00.118)       0:00:47.556 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.082)       0:00:47.639 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.093)       0:00:47.733 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.036)       0:00:47.769 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.042)       0:00:47.811 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.097)       0:00:47.909 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=eb458450-3386-4608-bacc-da889e315bf0', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.076)       0:00:47.985 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.026)       0:00:48.012 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.029)       0:00:48.042 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.028)       0:00:48.070 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.027)       0:00:48.098 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.030)       0:00:48.129 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.028)       0:00:48.157 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.030)       0:00:48.188 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.080)       0:00:48.269 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.030)       0:00:48.299 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.030)       0:00:48.330 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.028)       0:00:48.359 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.030)       0:00:48.389 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.028)       0:00:48.418 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.040)       0:00:48.458 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Wednesday 20 November 2024  12:05:52 -0500 (0:00:00.061)       0:00:48.520 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=eb458450-3386-4608-bacc-da889e315bf0', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.078)       0:00:48.598 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.107)       0:00:48.706 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.246)       0:00:48.952 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.118)       0:00:49.070 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.097)       0:00:49.167 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.041)       0:00:49.209 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.036)       0:00:49.246 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.040)       0:00:49.286 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.058)       0:00:49.344 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.088)       0:00:49.433 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.032)       0:00:49.465 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.041)       0:00:49.506 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.030)       0:00:49.537 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Wednesday 20 November 2024  12:05:53 -0500 (0:00:00.040)       0:00:49.578 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=eb458450-3386-4608-bacc-da889e315bf0 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
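
These match lists come from scanning /etc/fstab for the volume's UUID, mount
point, and mount options; the assertions below only compare the number of
matches against the expected counts. A rough single-line equivalent with GNU
grep, using the UUID from this run:

    uuid=eb458450-3386-4608-bacc-da889e315bf0
    # One line should mount this UUID at /opt/test1 as xfs with 'defaults'
    grep -E "^UUID=${uuid}\s+/opt/test1\s+xfs\s+defaults\s" /etc/fstab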

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.098)       0:00:49.676 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.072)       0:00:49.749 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.067)       0:00:49.817 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.058)       0:00:49.875 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.037)       0:00:49.913 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.032)       0:00:49.945 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.072)       0:00:50.018 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.075)       0:00:50.094 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122338.4894612,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1732122338.4894612,
        "dev": 6,
        "device_type": 64769,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5521,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1732122338.4894612,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
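
/dev/mapper/foo-test1 is a symlink to the kernel dm node (device_type 64769
is major 253, minor 1, i.e. /dev/dm-1), which is why the stat reports both
"isblk": true and "mimetype": "inode/symlink". A minimal manual equivalent:

    dev=/dev/mapper/foo-test1
    [ -b "$dev" ] && echo "block device present"   # -b follows the symlink
    readlink -f "$dev"                             # /dev/dm-1 on this host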

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.406)       0:00:50.501 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.039)       0:00:50.540 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Wednesday 20 November 2024  12:05:54 -0500 (0:00:00.030)       0:00:50.571 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Wednesday 20 November 2024  12:05:55 -0500 (0:00:00.039)       0:00:50.611 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Wednesday 20 November 2024  12:05:55 -0500 (0:00:00.034)       0:00:50.645 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Wednesday 20 November 2024  12:05:55 -0500 (0:00:00.031)       0:00:50.676 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Wednesday 20 November 2024  12:05:55 -0500 (0:00:00.037)       0:00:50.714 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Wednesday 20 November 2024  12:05:55 -0500 (0:00:00.027)       0:00:50.742 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Wednesday 20 November 2024  12:05:55 -0500 (0:00:00.720)       0:00:51.463 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Wednesday 20 November 2024  12:05:55 -0500 (0:00:00.032)       0:00:51.496 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Wednesday 20 November 2024  12:05:55 -0500 (0:00:00.033)       0:00:51.529 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.111)       0:00:51.641 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.035)       0:00:51.676 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.031)       0:00:51.708 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.031)       0:00:51.739 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.031)       0:00:51.771 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.033)       0:00:51.804 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.084)       0:00:51.889 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.062)       0:00:51.952 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.040)       0:00:51.992 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.040)       0:00:52.032 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.039)       0:00:52.072 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.021)       0:00:52.094 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.021)       0:00:52.116 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.019)       0:00:52.135 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.019)       0:00:52.154 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.019)       0:00:52.173 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.018)       0:00:52.192 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.017)       0:00:52.210 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.020)       0:00:52.230 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.018)       0:00:52.249 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.018)       0:00:52.267 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Wednesday 20 November 2024  12:05:56 -0500 (0:00:00.019)       0:00:52.287 **** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Wednesday 20 November 2024  12:05:57 -0500 (0:00:00.455)       0:00:52.742 **** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
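
Both parses land on 4294967296 bytes because the requested "4g" and the
actual LV size agree; the conversion is plain IEC arithmetic (4 * 2^30),
reproducible with coreutils:

    numfmt --from=iec 4G           # 4294967296
    numfmt --to=iec 4294967296     # 4.0G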

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Wednesday 20 November 2024  12:05:57 -0500 (0:00:00.378)       0:00:53.120 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Wednesday 20 November 2024  12:05:57 -0500 (0:00:00.052)       0:00:53.173 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Wednesday 20 November 2024  12:05:57 -0500 (0:00:00.036)       0:00:53.209 **** 
ok: [managed-node3] => {
    "bytes": 10715943403,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.447)       0:00:53.657 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.101)       0:00:53.758 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.070)       0:00:53.829 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.085)       0:00:53.914 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.058)       0:00:53.973 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.022)       0:00:53.996 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.019)       0:00:54.015 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.018)       0:00:54.034 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.019)       0:00:54.054 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.018)       0:00:54.073 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.019)       0:00:54.092 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.017)       0:00:54.109 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.018)       0:00:54.128 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.021)       0:00:54.150 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.018)       0:00:54.168 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.049)       0:00:54.218 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.071)       0:00:54.290 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.025)       0:00:54.315 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.020)       0:00:54.336 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}
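
All of the thin-pool sizing tasks above are skipped because storage_test_volume.thin is false: the volume under test is a plain LVM LV, so there is no thin pool whose usable space needs to be bounded and converted. A volume that would exercise this chain could be declared with the thin options from the role's volume spec (parameter names as they appear in the module arguments later in this log; the values here are illustrative):

    storage_pools:
      - name: foo
        disks: [sda]
        type: lvm
        volumes:
          - name: thin1
            thin: true              # enables the thin-pool size checks above
            thin_pool_name: tpool   # illustrative
            thin_pool_size: 10g     # illustrative; feeds the upper/lower limit tasks
            size: 4g
            mount_point: /opt/test1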

TASK [Show actual size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.022)       0:00:54.359 **** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.029)       0:00:54.388 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.023)       0:00:54.412 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
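
For this non-thin volume the check reduces to comparing the byte count reported for the device against the expected value. A condensed sketch of that final assertion, using the two variables printed above (the exact wording in test-verify-volume-size.yml may differ):

    - name: Assert expected size is actual size
      ansible.builtin.assert:
        that:
          - storage_test_actual_size.bytes == storage_test_expected_size | int
        msg: >-
          expected {{ storage_test_expected_size }} bytes,
          got {{ storage_test_actual_size.bytes }}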

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Wednesday 20 November 2024  12:05:58 -0500 (0:00:00.053)       0:00:54.465 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.028460",
    "end": "2024-11-20 12:05:59.217182",
    "rc": 0,
    "start": "2024-11-20 12:05:59.188722"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.407)       0:00:54.873 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.067)       0:00:54.940 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
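
The segment-type check is driven entirely by that one lvs call: --nameprefixes --unquoted yields KEY=value pairs on stdout, so LVM2_SEGTYPE can be pulled out with a regex. Restated as standalone tasks (the command arguments are exactly those shown above; the regex_search parse is an assumption about how the fact is derived):

    - name: Get information about the LV
      ansible.builtin.command:
        argv:
          - lvs
          - --noheadings
          - --nameprefixes
          - --units=b
          - --nosuffix
          - --unquoted
          - -o
          - name,attr,cache_total_blocks,chunk_size,segtype
          - foo/test1
      register: lvs_out
      changed_when: false

    - name: Set LV segment type
      ansible.builtin.set_fact:
        # regex_search with a capture group returns a list, e.g. ['linear']
        storage_test_lv_segtype: "{{ lvs_out.stdout | regex_search('LVM2_SEGTYPE=(\\S+)', '\\1') }}"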

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.049)       0:00:54.990 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.042)       0:00:55.032 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.044)       0:00:55.077 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.045)       0:00:55.122 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}
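
The four cache tasks are skipped for the same reason the thin chain was: storage_test_volume.cached is false, which matches the lvs output above (linear segtype, empty LVM2_CACHE_TOTAL_BLOCKS). A cached LV would be requested with the cache options from the volume spec (parameter names from the module arguments in this log; values and the fast device are illustrative):

    volumes:
      - name: test1
        size: 4g
        mount_point: /opt/test1
        cached: true                  # turns the cache checks above on
        cache_size: 1g                # illustrative
        cache_devices: [sdb]          # illustrative fast device
        cache_mode: writethrough      # illustrative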

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.036)       0:00:55.159 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.019)       0:00:55.179 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.015)       0:00:55.195 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Remove the key file] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:134
Wednesday 20 November 2024  12:05:59 -0500 (0:00:00.026)       0:00:55.221 **** 
ok: [managed-node3] => {
    "changed": false,
    "path": "/tmp/storage_test7rjvd28_lukskey",
    "state": "absent"
}

TASK [Create a file] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/create-test-file.yml:12
Wednesday 20 November 2024  12:06:00 -0500 (0:00:00.633)       0:00:55.855 **** 
changed: [managed-node3] => {
    "changed": true,
    "dest": "/opt/test1/quux",
    "gid": 0,
    "group": "root",
    "mode": "0644",
    "owner": "root",
    "secontext": "unconfined_u:object_r:unlabeled_t:s0",
    "size": 0,
    "state": "file",
    "uid": 0
}
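
The test plants an empty marker file on the mounted volume so a later verify-data-preservation.yml pass can prove the filesystem's contents survived the upcoming (intentionally failed) reformat attempt. A plausible form of the task, assuming create-test-file.yml uses the file module (the path and mode match the result above):

    - name: Create a file
      ansible.builtin.file:
        path: /opt/test1/quux
        state: touch
        mode: "0644"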

TASK [Test for correct handling of safe_mode] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:143
Wednesday 20 November 2024  12:06:00 -0500 (0:00:00.387)       0:00:56.242 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml for managed-node3

TASK [Store global variable value copy] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:4
Wednesday 20 November 2024  12:06:00 -0500 (0:00:00.035)       0:00:56.277 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_pools_global": [],
        "storage_safe_mode_global": true,
        "storage_volumes_global": []
    },
    "changed": false
}
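
verify-role-failed.yml invokes the role with a configuration that must fail, then asserts on the captured error (see "Check that we failed in the role" below). One common shape for such a wrapper, sketched here as an assumption rather than the file's literal content:

    - name: Verify role raises correct error
      block:
        - name: Run the role with the failing configuration
          ansible.builtin.include_role:
            name: fedora.linux_system_roles.storage
      rescue:
        - name: Check that we failed in the role
          ansible.builtin.assert:
            that:
              - ansible_failed_result.msg is search('cannot remove and recreate existing pool')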

TASK [Verify role raises correct error] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:10
Wednesday 20 November 2024  12:06:00 -0500 (0:00:00.044)       0:00:56.322 **** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 20 November 2024  12:06:00 -0500 (0:00:00.050)       0:00:56.372 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 20 November 2024  12:06:00 -0500 (0:00:00.047)       0:00:56.419 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 20 November 2024  12:06:00 -0500 (0:00:00.083)       0:00:56.503 **** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.092)       0:00:56.596 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.042)       0:00:56.638 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.037)       0:00:56.676 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.032)       0:00:56.708 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.022)       0:00:56.731 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.058)       0:00:56.789 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.033)       0:00:56.823 **** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.028)       0:00:56.851 **** 
ok: [managed-node3] => {
    "storage_volumes": []
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.026)       0:00:56.877 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.023)       0:00:56.901 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.022)       0:00:56.923 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.026)       0:00:56.950 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.025)       0:00:56.976 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.054)       0:00:57.030 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Wednesday 20 November 2024  12:06:01 -0500 (0:00:00.017)       0:00:57.048 **** 
fatal: [managed-node3]: FAILED! => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

MSG:

cannot remove and recreate existing pool 'foo' in safe mode
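
This failure is exactly what the test wants: safe_mode is true here, and converting pool 'foo' from LUKS-on-sda to plain LVM cannot be done without destroying and recreating it, so the role refuses. Outside of a failure test, the destructive path has to be opted into explicitly. A sketch of the permissive invocation (storage_safe_mode is the role's real variable; the rest mirrors the pool spec shown above):

    - name: Recreate the pool, explicitly allowing data loss
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_safe_mode: false   # permit destroy/recreate of existing devices
        storage_pools:
          - name: foo
            disks: [sda]
            type: lvm
            encryption: false
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1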

TASK [fedora.linux_system_roles.storage : Failed message] **********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:109
Wednesday 20 November 2024  12:06:03 -0500 (0:00:02.022)       0:00:59.071 **** 
fatal: [managed-node3]: FAILED! => {
    "changed": false
}

MSG:

{'changed': False,
 'actions': [],
 'leaves': [],
 'mounts': [],
 'crypts': [],
 'pools': [],
 'volumes': [],
 'packages': [],
 'failed': True,
 'msg': "cannot remove and recreate existing pool 'foo' in safe mode",
 'invocation': {
   'module_args': {
     'pools': [{
       'disks': ['sda'],
       'encryption': False,
       'encryption_cipher': None,
       'encryption_key': None,
       'encryption_key_size': 0,
       'encryption_luks_version': 'luks2',
       'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER',
       'encryption_clevis_pin': None,
       'encryption_tang_url': None,
       'encryption_tang_thumbprint': None,
       'grow_to_fill': False,
       'name': 'foo',
       'raid_level': None,
       'raid_device_count': None,
       'raid_spare_count': None,
       'raid_metadata_version': None,
       'raid_chunk_size': None,
       'shared': False,
       'state': 'present',
       'type': 'lvm',
       'volumes': [{
         'encryption': None,
         'encryption_cipher': None,
         'encryption_key': None,
         'encryption_key_size': None,
         'encryption_luks_version': None,
         'encryption_password': None,
         'fs_create_options': None,
         'fs_label': None,
         'fs_type': None,
         'mount_options': None,
         'mount_point': '/opt/test1',
         'mount_user': None,
         'mount_group': None,
         'mount_mode': None,
         'name': 'test1',
         'raid_level': None,
         'size': '4g',
         'state': 'present',
         'type': None,
         'cached': None,
         'cache_devices': [],
         'cache_mode': None,
         'cache_size': None,
         'compression': None,
         'deduplication': None,
         'raid_disks': [],
         'raid_stripe_size': None,
         'thin_pool_name': None,
         'thin_pool_size': None,
         'thin': False,
         'vdo_pool_size': None}]}],
     'volumes': [],
     'use_partitions': None,
     'disklabel_type': None,
     'pool_defaults': {
       'state': 'present',
       'type': 'lvm',
       'disks': [],
       'volumes': [],
       'grow_to_fill': False,
       'encryption': False,
       'encryption_password': None,
       'encryption_key': None,
       'encryption_cipher': None,
       'encryption_key_size': None,
       'encryption_luks_version': None,
       'raid_level': None,
       'raid_device_count': None,
       'raid_spare_count': None,
       'raid_chunk_size': None,
       'raid_metadata_version': None,
       'shared': False},
     'volume_defaults': {
       'state': 'present',
       'type': 'lvm',
       'size': 0,
       'disks': [],
       'fs_type': 'xfs',
       'fs_label': '',
       'fs_create_options': '',
       'fs_overwrite_existing': True,
       'mount_point': '',
       'mount_options': 'defaults',
       'mount_check': 0,
       'mount_passno': 0,
       'mount_device_identifier': 'uuid',
       'raid_level': None,
       'raid_device_count': None,
       'raid_spare_count': None,
       'raid_chunk_size': None,
       'raid_stripe_size': None,
       'raid_metadata_version': None,
       'encryption': False,
       'encryption_password': None,
       'encryption_key': None,
       'encryption_cipher': None,
       'encryption_key_size': None,
       'encryption_luks_version': None,
       'compression': None,
       'deduplication': None,
       'vdo_pool_size': None,
       'thin': None,
       'thin_pool_name': None,
       'thin_pool_size': None,
       'cached': False,
       'cache_size': 0,
       'cache_mode': None,
       'cache_devices': []},
     'safe_mode': True,
     'packages_only': False,
     'diskvolume_mkfs_option_map': {}}},
 '_ansible_no_log': False}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Wednesday 20 November 2024  12:06:03 -0500 (0:00:00.048)       0:00:59.119 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that we failed in the role] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:23
Wednesday 20 November 2024  12:06:03 -0500 (0:00:00.032)       0:00:59.151 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the blivet output and error message are correct] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:28
Wednesday 20 November 2024  12:06:03 -0500 (0:00:00.038)       0:00:59.190 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify correct exception or error message] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:39
Wednesday 20 November 2024  12:06:03 -0500 (0:00:00.046)       0:00:59.237 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_failed_exception is defined",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the file] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-data-preservation.yml:11
Wednesday 20 November 2024  12:06:03 -0500 (0:00:00.054)       0:00:59.291 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122360.596464,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1732122360.596464,
        "dev": 64769,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 131,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0644",
        "mtime": 1732122360.596464,
        "nlink": 1,
        "path": "/opt/test1/quux",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3665790647",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Assert file presence] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-data-preservation.yml:16
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.392)       0:00:59.683 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
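
verify-data-preservation.yml is the mirror of create-test-file.yml: stat the marker file and assert it still exists (and, presumably, assert absence when a test expects destruction). The essence, sketched with an assumed register name:

    - name: Stat the file
      ansible.builtin.stat:
        path: /opt/test1/quux
      register: storage_test_file

    - name: Assert file presence
      ansible.builtin.assert:
        that:
          - storage_test_file.stat.exists
        msg: data lost! /opt/test1/quux is missing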

TASK [Remove the encryption layer] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:165
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.029)       0:00:59.713 **** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.040)       0:00:59.754 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.033)       0:00:59.788 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.052)       0:00:59.840 **** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.069)       0:00:59.909 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.027)       0:00:59.937 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.029)       0:00:59.966 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.023)       0:00:59.989 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.059)       0:01:00.048 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.048)       0:01:00.097 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.025)       0:01:00.122 **** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.024)       0:01:00.146 **** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.024)       0:01:00.170 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.024)       0:01:00.195 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.024)       0:01:00.220 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.024)       0:01:00.244 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.027)       0:01:00.271 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.090)       0:01:00.362 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Wednesday 20 November 2024  12:06:04 -0500 (0:00:00.023)       0:01:00.385 **** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "luks"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [
        {
            "backing_device": "/dev/sda",
            "name": "luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
            "password": "-",
            "state": "absent"
        }
    ],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test1",
            "src": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
            "state": "mounted"
        }
    ],
    "packages": [
        "lvm2",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks2",
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
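
This is the same pool spec that safe mode rejected above, now applied: the action list tears the stack down from the top (xfs filesystem, LV, VG, LUKS mapping, LUKS format on /dev/sda) and rebuilds it without the encryption layer. The "Show storage_volumes" debug printed VARIABLE IS NOT DEFINED! this time, presumably because the play passes only storage_pools for this step. The crypts entry with state "absent" is what drives removal of the stale /etc/crypttab line; the role handles this internally, but expressed as a standalone task it would be roughly (community.general.crypttab used purely as an illustration):

    - name: Remove the LUKS mapping from /etc/crypttab
      community.general.crypttab:
        name: luks-7257ace9-e57f-4dfc-9a4b-84a32979e624
        state: absent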

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Wednesday 20 November 2024  12:06:08 -0500 (0:00:03.297)       0:01:03.683 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Wednesday 20 November 2024  12:06:08 -0500 (0:00:00.060)       0:01:03.744 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122341.9874616,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "f10e4308caa381eeaed8236f5d14bec971047187",
        "ctime": 1732122341.9844615,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 297795793,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1732122341.9844615,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "2100948536",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Wednesday 20 November 2024  12:06:08 -0500 (0:00:00.400)       0:01:04.144 **** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Wednesday 20 November 2024  12:06:08 -0500 (0:00:00.401)       0:01:04.546 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Wednesday 20 November 2024  12:06:08 -0500 (0:00:00.031)       0:01:04.577 **** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "luks"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [
            {
                "backing_device": "/dev/sda",
                "name": "luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
                "password": "-",
                "state": "absent"
            }
        ],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test1",
                "src": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
                "state": "mounted"
            }
        ],
        "packages": [
            "lvm2",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks2",
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Wednesday 20 November 2024  12:06:09 -0500 (0:00:00.045)       0:01:04.623 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks2",
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Wednesday 20 November 2024  12:06:09 -0500 (0:00:00.040)       0:01:04.664 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Wednesday 20 November 2024  12:06:09 -0500 (0:00:00.040)       0:01:04.704 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=eb458450-3386-4608-bacc-da889e315bf0', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test1",
        "src": "UUID=eb458450-3386-4608-bacc-da889e315bf0",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=eb458450-3386-4608-bacc-da889e315bf0"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Wednesday 20 November 2024  12:06:09 -0500 (0:00:00.442)       0:01:05.147 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
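
The null name and empty status are consistent with ansible.builtin.systemd_service being invoked purely for a daemon reload, roughly (sketch):

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd_service:
        daemon_reload: true

systemd generates .mount units from /etc/fstab, so without this reload it could keep acting on the entry that was just removed.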

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Wednesday 20 November 2024  12:06:10 -0500 (0:00:00.785)       0:01:05.932 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Wednesday 20 November 2024  12:06:10 -0500 (0:00:00.437)       0:01:06.369 **** 
skipping: [managed-node3] => (item={'src': 'UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped
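
The false_condition above shows the gate: ownership/permission management runs only when the mount item sets owner, group, or mode. A sketch of such a guarded task (the module choice is an assumption; ansible.builtin.file is the natural fit):

    - name: Manage mount ownership/permissions
      ansible.builtin.file:
        path: "{{ mount_info['path'] }}"
        state: directory
        owner: "{{ mount_info['owner'] | default(omit, true) }}"  # omit when null
        group: "{{ mount_info['group'] | default(omit, true) }}"
        mode: "{{ mount_info['mode'] | default(omit, true) }}"
      when: mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none
      loop: "{{ storage_current_mounts }}"    # assumed name
      loop_control:
        loop_var: mount_info

Here owner, group, and mode are all null, so the single item is skipped.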

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Wednesday 20 November 2024  12:06:10 -0500 (0:00:00.060)       0:01:06.430 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Wednesday 20 November 2024  12:06:11 -0500 (0:00:00.772)       0:01:07.202 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122346.3544621,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "be1bd61a6b799d7c22660b358358b04c0716237e",
        "ctime": 1732122343.7844617,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 452985045,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0600",
        "mtime": 1732122343.785272,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 87,
        "uid": 0,
        "version": "877490320",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Wednesday 20 November 2024  12:06:11 -0500 (0:00:00.371)       0:01:07.573 **** 
changed: [managed-node3] => (item={'backing_device': '/dev/sda', 'name': 'luks-7257ace9-e57f-4dfc-9a4b-84a32979e624', 'password': '-', 'state': 'absent'}) => {
    "ansible_loop_var": "entry",
    "backup": "",
    "changed": true,
    "entry": {
        "backing_device": "/dev/sda",
        "name": "luks-7257ace9-e57f-4dfc-9a4b-84a32979e624",
        "password": "-",
        "state": "absent"
    },
    "found": 1
}

MSG:

1 line(s) removed
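
The backup/found keys and the "1 line(s) removed" message match ansible.builtin.lineinfile with state: absent; the task is presumably shaped like this sketch (the regexp is illustrative):

    - name: Manage /etc/crypttab to account for changes we just made
      ansible.builtin.lineinfile:
        path: /etc/crypttab
        regexp: "^{{ entry.name }} "
        state: "{{ entry.state }}"            # 'absent' here
      loop: "{{ storage_crypttab_entries }}"  # assumed name for the computed entry list
      loop_control:
        loop_var: entry

found: 1 is the number of matching lines, confirming that the single crypttab entry for the old LUKS mapping was removed now that the pool is no longer encrypted.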

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Wednesday 20 November 2024  12:06:12 -0500 (0:00:00.435)       0:01:08.009 **** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:181
Wednesday 20 November 2024  12:06:13 -0500 (0:00:00.956)       0:01:08.966 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Wednesday 20 November 2024  12:06:13 -0500 (0:00:00.058)       0:01:09.024 **** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks2",
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Wednesday 20 November 2024  12:06:13 -0500 (0:00:00.074)       0:01:09.098 **** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Wednesday 20 November 2024  12:06:13 -0500 (0:00:00.059)       0:01:09.158 **** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "vHqkdQ-QrZd-FxRB-6S4j-GSQM-G3aC-maphq2"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "63814bf2-dbd4-439c-b63b-6d05ca07d081"
        }
    }
}
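
The info map above (name/type/size/fstype/label/uuid/mountpoint per device) is produced by what is presumably a test-local helper module; a roughly equivalent stand-alone collection would be (sketch):

    - name: Collect info about the volumes
      ansible.builtin.command:
        cmd: lsblk -p --pairs -o NAME,TYPE,SIZE,FSTYPE,LABEL,UUID,MOUNTPOINT

Note that /dev/mapper/foo-test1 now carries a plain xfs filesystem with the new UUID 23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491, i.e. the LV is no longer behind a LUKS mapping.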

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Wednesday 20 November 2024  12:06:13 -0500 (0:00:00.391)       0:01:09.550 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.004092",
    "end": "2024-11-20 12:06:15.279645",
    "rc": 0,
    "start": "2024-11-20 12:06:14.275553"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Nov 12 09:07:12 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=63814bf2-dbd4-439c-b63b-6d05ca07d081 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491 /opt/test1 xfs defaults 0 0
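
The last line above is the entry the role just wrote. A check of roughly this shape (sketch; the registered variable name is assumed) confirms the volume appears in fstab exactly once:

    - name: Verify that the volume is in /etc/fstab exactly once
      ansible.builtin.assert:
        that:
          - storage_test_fstab.stdout_lines | select('search', '/opt/test1') | list | length == 1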

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Wednesday 20 November 2024  12:06:15 -0500 (0:00:01.384)       0:01:10.935 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003257",
    "end": "2024-11-20 12:06:15.665587",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-20 12:06:15.662330"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Wednesday 20 November 2024  12:06:15 -0500 (0:00:00.384)       0:01:11.319 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks2', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Wednesday 20 November 2024  12:06:15 -0500 (0:00:00.103)       0:01:11.423 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Wednesday 20 November 2024  12:06:15 -0500 (0:00:00.033)       0:01:11.456 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.019446",
    "end": "2024-11-20 12:06:16.217092",
    "rc": 0,
    "start": "2024-11-20 12:06:16.197646"
}

STDOUT:

        0

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.403)       0:01:11.859 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
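
vgs --noheadings --binary -o shared prints 1 for a shared (lock-managed) VG and 0 otherwise, so the 0 above matches the pool's shared: false. The assertion is presumably of this shape (sketch; vg_shared is an assumed register name):

    - name: Verify that VG shared value checks out
      ansible.builtin.assert:
        that:
          - (vg_shared.stdout | trim | bool) == storage_test_pool.shared
        msg: VG shared status does not match the requested 'shared' setting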

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.033)       0:01:11.893 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.047)       0:01:11.941 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.048)       0:01:11.989 **** 
ok: [managed-node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.349)       0:01:12.338 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.040)       0:01:12.378 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.045)       0:01:12.424 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.039)       0:01:12.464 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.022)       0:01:12.486 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.044)       0:01:12.530 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.017)       0:01:12.548 **** 
ok: [managed-node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Wednesday 20 November 2024  12:06:16 -0500 (0:00:00.025)       0:01:12.573 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True
sys:1: DeprecationWarning: builtin type swigvarlink has no __module__ attribute



TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.371)       0:01:12.945 **** 
skipping: [managed-node3] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "grow_supported.stdout | trim == 'True'",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped
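
Note why these items were skipped even though the support check printed True: the DeprecationWarning landed in the same stdout, so grow_supported.stdout | trim compares a two-line string against 'True' and fails. A condition that looks only at the first line would be more robust, e.g. (sketch):

    when: grow_supported.stdout_lines | first | trim == 'True'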

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.040)       0:01:12.986 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.039)       0:01:13.025 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.017)       0:01:13.043 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.019)       0:01:13.062 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.045)       0:01:13.107 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.018)       0:01:13.126 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.018)       0:01:13.144 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.017)       0:01:13.162 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.018)       0:01:13.180 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.017)       0:01:13.198 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.019)       0:01:13.218 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.016)       0:01:13.235 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.018)       0:01:13.254 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.037)       0:01:13.291 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.036)       0:01:13.327 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.021)       0:01:13.349 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.022)       0:01:13.371 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.022)       0:01:13.393 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.023)       0:01:13.416 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.022)       0:01:13.439 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.021)       0:01:13.460 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.022)       0:01:13.483 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.041)       0:01:13.525 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.040)       0:01:13.565 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Wednesday 20 November 2024  12:06:17 -0500 (0:00:00.018)       0:01:13.584 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.019)       0:01:13.604 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.019)       0:01:13.623 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.022)       0:01:13.645 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.042)       0:01:13.688 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.040)       0:01:13.728 **** 
skipping: [managed-node3] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.021)       0:01:13.750 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node3 => (item=/dev/sda)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.035)       0:01:13.786 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.043)       0:01:13.829 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
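
With the pool no longer encrypted, _storage_test_expected_crypttab_entries is "0" and the filtered entry list is empty, so an assertion of roughly this shape passes (sketch reconstructed from the variables set above):

    - name: Check for /etc/crypttab entry
      ansible.builtin.assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int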

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.043)       0:01:13.873 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.039)       0:01:13.912 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.037)       0:01:13.950 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.043)       0:01:13.994 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.021)       0:01:14.016 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.021)       0:01:14.037 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.043)       0:01:14.080 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.039)       0:01:14.120 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.021)       0:01:14.141 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.019)       0:01:14.160 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.019)       0:01:14.180 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.017)       0:01:14.198 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.019)       0:01:14.217 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.018)       0:01:14.236 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.020)       0:01:14.257 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.046)       0:01:14.303 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.020)       0:01:14.324 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.018)       0:01:14.343 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.017)       0:01:14.360 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.017)       0:01:14.378 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.017)       0:01:14.396 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.019)       0:01:14.415 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.049)       0:01:14.465 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.034)       0:01:14.499 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Wednesday 20 November 2024  12:06:18 -0500 (0:00:00.045)       0:01:14.545 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.106)       0:01:14.652 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.028)       0:01:14.680 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.045)       0:01:14.725 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.019)       0:01:14.744 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.023)       0:01:14.768 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.018)       0:01:14.787 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.017)       0:01:14.805 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.018)       0:01:14.823 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.018)       0:01:14.842 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.017)       0:01:14.859 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.018)       0:01:14.877 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.019)       0:01:14.897 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.059)       0:01:14.956 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.045)       0:01:15.001 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.046)       0:01:15.047 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.036)       0:01:15.083 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.022)       0:01:15.106 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.019)       0:01:15.126 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.042)       0:01:15.169 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.045)       0:01:15.215 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122367.9654648,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1732122367.9654648,
        "dev": 6,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5557,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1732122367.9654648,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Wednesday 20 November 2024  12:06:19 -0500 (0:00:00.356)       0:01:15.571 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.025)       0:01:15.597 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.018)       0:01:15.616 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.023)       0:01:15.640 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.021)       0:01:15.661 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.018)       0:01:15.680 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.024)       0:01:15.705 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.017)       0:01:15.723 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.667)       0:01:16.390 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.018)       0:01:16.409 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.017)       0:01:16.427 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.045)       0:01:16.472 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.020)       0:01:16.492 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.018)       0:01:16.511 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.018)       0:01:16.530 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.018)       0:01:16.548 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Wednesday 20 November 2024  12:06:20 -0500 (0:00:00.020)       0:01:16.568 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.049)       0:01:16.618 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.046)       0:01:16.664 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.036)       0:01:16.701 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.037)       0:01:16.739 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.036)       0:01:16.776 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.020)       0:01:16.796 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.017)       0:01:16.814 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.019)       0:01:16.834 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.017)       0:01:16.852 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.017)       0:01:16.869 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.018)       0:01:16.888 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.017)       0:01:16.905 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.018)       0:01:16.924 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.020)       0:01:16.944 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.017)       0:01:16.961 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.017)       0:01:16.979 **** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Wednesday 20 November 2024  12:06:21 -0500 (0:00:00.365)       0:01:17.344 **** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.365)       0:01:17.710 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.045)       0:01:17.755 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.052)       0:01:17.808 **** 
ok: [managed-node3] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.368)       0:01:18.177 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.040)       0:01:18.217 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.037)       0:01:18.255 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.037)       0:01:18.293 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.040)       0:01:18.333 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.019)       0:01:18.353 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.370 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.016)       0:01:18.387 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.405 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.423 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.440 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.019)       0:01:18.460 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.477 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.495 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.513 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.531 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.548 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.019)       0:01:18.567 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Wednesday 20 November 2024  12:06:22 -0500 (0:00:00.017)       0:01:18.584 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.023)       0:01:18.608 **** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.021)       0:01:18.630 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.021)       0:01:18.652 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.046)       0:01:18.698 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.027040",
    "end": "2024-11-20 12:06:23.437235",
    "rc": 0,
    "start": "2024-11-20 12:06:23.410195"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.384)       0:01:19.083 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.041)       0:01:19.124 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.042)       0:01:19.166 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.035)       0:01:19.202 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.036)       0:01:19.239 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.038)       0:01:19.277 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.041)       0:01:19.319 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.020)       0:01:19.339 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.016)       0:01:19.355 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Create a file] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/create-test-file.yml:12
Wednesday 20 November 2024  12:06:23 -0500 (0:00:00.018)       0:01:19.374 **** 
changed: [managed-node3] => {
    "changed": true,
    "dest": "/opt/test1/quux",
    "gid": 0,
    "group": "root",
    "mode": "0644",
    "owner": "root",
    "secontext": "unconfined_u:object_r:unlabeled_t:s0",
    "size": 0,
    "state": "file",
    "uid": 0
}

TASK [Test for correct handling of safe_mode] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:187
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.358)       0:01:19.733 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml for managed-node3

TASK [Store global variable value copy] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:4
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.036)       0:01:19.770 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_pools_global": [],
        "storage_safe_mode_global": true,
        "storage_volumes_global": []
    },
    "changed": false
}

TASK [Verify role raises correct error] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:10
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.040)       0:01:19.810 **** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.031)       0:01:19.841 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.028)       0:01:19.870 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.038)       0:01:19.909 **** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.048)       0:01:19.958 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.023)       0:01:19.981 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.021)       0:01:20.003 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.020)       0:01:20.024 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.019)       0:01:20.043 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.046)       0:01:20.090 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.024)       0:01:20.114 **** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": "aes-xts-plain64",
            "encryption_key_size": 512,
            "encryption_luks_version": "luks1",
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.043)       0:01:20.158 **** 
ok: [managed-node3] => {
    "storage_volumes": []
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.041)       0:01:20.200 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.021)       0:01:20.222 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.022)       0:01:20.245 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.021)       0:01:20.266 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.023)       0:01:20.290 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.050)       0:01:20.341 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Wednesday 20 November 2024  12:06:24 -0500 (0:00:00.017)       0:01:20.358 **** 
fatal: [managed-node3]: FAILED! => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

MSG:

cannot remove and recreate existing pool 'foo' in safe mode

TASK [fedora.linux_system_roles.storage : Failed message] **********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:109
Wednesday 20 November 2024  12:06:26 -0500 (0:00:01.851)       0:01:22.209 **** 
fatal: [managed-node3]: FAILED! => {
    "changed": false
}

MSG:

{'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'msg': "cannot remove and recreate existing pool 'foo' in safe mode", 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': True, 'encryption_cipher': 'aes-xts-plain64', 'encryption_key': None, 'encryption_key_size': 512, 'encryption_luks_version': 'luks1', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': None, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': None, 'fs_label': None, 'fs_type': None, 'mount_options': None, 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': None, 'cached': None, 'cache_devices': [], 'cache_mode': None, 'cache_size': None, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'grow_to_fill': False, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': True, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Wednesday 20 November 2024  12:06:26 -0500 (0:00:00.027)       0:01:22.237 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that we failed in the role] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:23
Wednesday 20 November 2024  12:06:26 -0500 (0:00:00.018)       0:01:22.255 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the blivet output and error message are correct] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:28
Wednesday 20 November 2024  12:06:26 -0500 (0:00:00.051)       0:01:22.306 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify correct exception or error message] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:39
Wednesday 20 November 2024  12:06:26 -0500 (0:00:00.030)       0:01:22.337 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_failed_exception is defined",
    "skip_reason": "Conditional result was False"
}
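
The three tasks above verify the failure: the first two assert that the role failed and that blivet's output and error message match expectations, and the exception check is skipped because __storage_failed_exception is not defined for this failure mode. A hedged sketch of the kind of assertion involved (the exact internals of verify-role-failed.yml are not shown in this log):

    - name: Check that we failed in the role (sketch)
      ansible.builtin.assert:
        that:
          - blivet_output is failed
          - blivet_output.msg is search("cannot remove and recreate existing pool")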

TASK [Stat the file] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-data-preservation.yml:11
Wednesday 20 November 2024  12:06:26 -0500 (0:00:00.026)       0:01:22.363 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122384.088467,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1732122384.088467,
        "dev": 64768,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 131,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0644",
        "mtime": 1732122384.088467,
        "nlink": 1,
        "path": "/opt/test1/quux",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "1472405938",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Assert file presence] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-data-preservation.yml:16
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.377)       0:01:22.740 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
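
The data-preservation check is a plain stat-plus-assert: the zero-byte marker file /opt/test1/quux written before the failed run must still exist, proving the safe-mode failure left the filesystem untouched. Roughly, with an assumed register name:

    - name: Stat the file
      ansible.builtin.stat:
        path: /opt/test1/quux
      register: __file_stat          # register name is an assumption

    - name: Assert file presence
      ansible.builtin.assert:
        that:
          - __file_stat.stat.exists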

TASK [Add encryption to the pool] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:212
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.033)       0:01:22.774 **** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.057)       0:01:22.831 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.028)       0:01:22.860 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.042)       0:01:22.902 **** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.052)       0:01:22.955 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.022)       0:01:22.977 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.022)       0:01:22.999 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.020)       0:01:23.019 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.019)       0:01:23.039 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.048)       0:01:23.087 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.024)       0:01:23.112 **** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": "aes-xts-plain64",
            "encryption_key_size": 512,
            "encryption_luks_version": "luks1",
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
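
This storage_pools structure is the caller's input to the role. Transcribed into playbook YAML it would read roughly as follows (the password is the test-only value visible in this debug task):

    storage_pools:
      - name: foo
        disks:
          - sda
        type: lvm
        encryption: true
        encryption_cipher: aes-xts-plain64
        encryption_key_size: 512
        encryption_luks_version: luks1
        encryption_password: yabbadabbadoo
        volumes:
          - name: test1
            size: 4g
            mount_point: /opt/test1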

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.045)       0:01:23.157 **** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.021)       0:01:23.179 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.024)       0:01:23.203 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.022)       0:01:23.226 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.022)       0:01:23.249 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.021)       0:01:23.270 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.049)       0:01:23.320 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Wednesday 20 November 2024  12:06:27 -0500 (0:00:00.016)       0:01:23.336 **** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "luks"
        },
        {
            "action": "create device",
            "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        }
    ],
    "changed": true,
    "crypts": [
        {
            "backing_device": "/dev/sda",
            "name": "luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "password": "-",
            "state": "present"
        }
    ],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test1",
            "src": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
            "state": "mounted"
        }
    ],
    "packages": [
        "lvm2",
        "xfsprogs",
        "cryptsetup"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": "aes-xts-plain64",
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 512,
            "encryption_luks_version": "luks1",
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
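
The action list spells out the full cycle: tear down the xfs filesystem, LV, and VG; destroy the bare lvmpv format on /dev/sda; create a LUKS container on the disk; then rebuild the PV/VG/LV/filesystem on the opened /dev/mapper/luks-... device. To inspect the resulting container by hand one could run something like this (illustrative only, not part of the test):

    - name: Inspect the new LUKS header (illustrative)
      ansible.builtin.command: cryptsetup luksDump /dev/sda
      changed_when: false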

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Wednesday 20 November 2024  12:06:40 -0500 (0:00:12.437)       0:01:35.774 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Wednesday 20 November 2024  12:06:40 -0500 (0:00:00.062)       0:01:35.836 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122370.7144651,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "b899fa0a599185458f4a4b97974b88dc856a722d",
        "ctime": 1732122370.7114651,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 297795793,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1732122370.7114651,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "2100948536",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Wednesday 20 November 2024  12:06:40 -0500 (0:00:00.389)       0:01:36.225 **** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Wednesday 20 November 2024  12:06:41 -0500 (0:00:00.381)       0:01:36.607 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Wednesday 20 November 2024  12:06:41 -0500 (0:00:00.027)       0:01:36.634 **** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "luks"
            },
            {
                "action": "create device",
                "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            }
        ],
        "changed": true,
        "crypts": [
            {
                "backing_device": "/dev/sda",
                "name": "luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
                "password": "-",
                "state": "present"
            }
        ],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test1",
                "src": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                "state": "mounted"
            }
        ],
        "packages": [
            "lvm2",
            "xfsprogs",
            "cryptsetup"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": "aes-xts-plain64",
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 512,
                "encryption_luks_version": "luks1",
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Wednesday 20 November 2024  12:06:41 -0500 (0:00:00.049)       0:01:36.684 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": "aes-xts-plain64",
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 512,
                "encryption_luks_version": "luks1",
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Wednesday 20 November 2024  12:06:41 -0500 (0:00:00.040)       0:01:36.724 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Wednesday 20 November 2024  12:06:41 -0500 (0:00:00.039)       0:01:36.764 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test1",
        "src": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491"
}
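
Each entry in blivet's mounts list with state 'absent' becomes one ansible.posix.mount call; the task above is roughly equivalent to:

    - name: Remove obsolete mount (sketch)
      ansible.posix.mount:
        path: /opt/test1
        src: UUID=23d4f50f-f66f-4b5b-9bb7-c6f99e9a9491
        fstype: xfs
        state: absent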

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Wednesday 20 November 2024  12:06:41 -0500 (0:00:00.433)       0:01:37.198 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
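
The refresh step is a plain daemon-reload so systemd regenerates its mount units from the edited /etc/fstab; a standalone equivalent would be roughly:

    - name: Tell systemd to refresh its view of /etc/fstab (sketch)
      ansible.builtin.systemd_service:
        daemon_reload: true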

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Wednesday 20 November 2024  12:06:42 -0500 (0:00:00.769)       0:01:37.967 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Wednesday 20 November 2024  12:06:42 -0500 (0:00:00.468)       0:01:38.435 **** 
skipping: [managed-node3] => (item={'src': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped
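
Ownership management is skipped here because mount_user, mount_group, and mount_mode are all null in the volume spec. To exercise that path a volume would set them explicitly, e.g. (hypothetical values):

    volumes:
      - name: test1
        mount_point: /opt/test1
        mount_user: root           # hypothetical
        mount_group: root          # hypothetical
        mount_mode: '0750'         # hypothetical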

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Wednesday 20 November 2024  12:06:42 -0500 (0:00:00.080)       0:01:38.516 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Wednesday 20 November 2024  12:06:43 -0500 (0:00:00.772)       0:01:39.288 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122375.6644657,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1732122372.3464653,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 696254732,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1732122372.3473866,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "988533418",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Wednesday 20 November 2024  12:06:44 -0500 (0:00:00.363)       0:01:39.652 **** 
changed: [managed-node3] => (item={'backing_device': '/dev/sda', 'name': 'luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da', 'password': '-', 'state': 'present'}) => {
    "ansible_loop_var": "entry",
    "backup": "",
    "changed": true,
    "entry": {
        "backing_device": "/dev/sda",
        "name": "luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
        "password": "-",
        "state": "present"
    }
}

MSG:

line added
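
One /etc/crypttab line is managed per entry in blivet's crypts list ('-' means no keyfile, so the passphrase must be supplied at unlock time). A standalone way to produce the same entry, though not necessarily the role's internal implementation, is community.general.crypttab:

    - name: Add crypttab entry (sketch)
      community.general.crypttab:
        name: luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da
        backing_device: /dev/sda
        password: '-'
        state: present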

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Wednesday 20 November 2024  12:06:44 -0500 (0:00:00.403)       0:01:40.055 **** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:231
Wednesday 20 November 2024  12:06:45 -0500 (0:00:00.977)       0:01:41.033 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Wednesday 20 November 2024  12:06:45 -0500 (0:00:00.063)       0:01:41.097 **** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": "aes-xts-plain64",
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 512,
            "encryption_luks_version": "luks1",
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Wednesday 20 November 2024  12:06:45 -0500 (0:00:00.072)       0:01:41.170 **** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Wednesday 20 November 2024  12:06:45 -0500 (0:00:00.063)       0:01:41.233 **** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "ca5740d8-1199-4001-9345-528c8e9465b7"
        },
        "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "size": "10G",
            "type": "crypt",
            "uuid": "gKLeaO-F2W1-m4c2-WqPE-nFGU-o2Gd-MS93lX"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "ffbfa3bd-c7c7-4b9e-adde-557677df23da"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "63814bf2-dbd4-439c-b63b-6d05ca07d081"
        }
    }
}
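
The collected info is essentially per-device lsblk output. The test uses its own helper for this, but a rough standalone equivalent is:

    - name: Collect info about the volumes (illustrative)
      ansible.builtin.command: lsblk -p -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID --json
      register: __lsblk
      changed_when: false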

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Wednesday 20 November 2024  12:06:46 -0500 (0:00:00.407)       0:01:41.641 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003033",
    "end": "2024-11-20 12:06:46.372855",
    "rc": 0,
    "start": "2024-11-20 12:06:46.369822"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Nov 12 09:07:12 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=63814bf2-dbd4-439c-b63b-6d05ca07d081 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=ca5740d8-1199-4001-9345-528c8e9465b7 /opt/test1 xfs defaults 0 0
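
Note the /opt/test1 UUID changed from 23d4f50f-... to ca5740d8-... because the filesystem was rebuilt on the encrypted stack, and exactly one entry remains. A hedged sketch of the check this read-back supports (the register name is assumed):

    - name: Verify the new fstab entry appears exactly once (sketch)
      ansible.builtin.assert:
        that:
          - >-
            storage_test_fstab.stdout_lines
            | select('search', 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7 /opt/test1')
            | list | length == 1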

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Wednesday 20 November 2024  12:06:46 -0500 (0:00:00.373)       0:01:42.014 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003182",
    "end": "2024-11-20 12:06:46.722241",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-20 12:06:46.719059"
}

STDOUT:

luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da /dev/sda -
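
The read-back shows exactly the entry added earlier, in crypttab's "<name> <backing-device> <keyfile>" form. A sketch of an equivalent assertion (register name assumed):

    - name: Verify the crypttab entry (sketch)
      ansible.builtin.assert:
        that:
          - storage_test_crypttab.stdout_lines == ['luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da /dev/sda -']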

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Wednesday 20 November 2024  12:06:46 -0500 (0:00:00.364)       0:01:42.379 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': 'aes-xts-plain64', 'encryption_key': None, 'encryption_key_size': 512, 'encryption_luks_version': 'luks1', 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Wednesday 20 November 2024  12:06:46 -0500 (0:00:00.092)       0:01:42.472 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Wednesday 20 November 2024  12:06:46 -0500 (0:00:00.033)       0:01:42.505 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.028145",
    "end": "2024-11-20 12:06:47.264159",
    "rc": 0,
    "start": "2024-11-20 12:06:47.236014"
}

STDOUT:

        0
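
With --binary, vgs prints the shared attribute as 0/1, so the follow-up assertion only has to compare the trimmed stdout against the pool's shared flag; roughly (register name assumed):

    - name: Verify that VG shared value checks out (sketch)
      ansible.builtin.assert:
        that:
          - (vgs_shared.stdout | trim == '0') == (not storage_test_pool.shared)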

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Wednesday 20 November 2024  12:06:47 -0500 (0:00:00.413)       0:01:42.919 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Wednesday 20 November 2024  12:06:47 -0500 (0:00:00.047)       0:01:42.966 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Wednesday 20 November 2024  12:06:47 -0500 (0:00:00.071)       0:01:43.038 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Wednesday 20 November 2024  12:06:47 -0500 (0:00:00.176)       0:01:43.215 **** 
ok: [managed-node3] => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
    "pv": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.392)       0:01:43.608 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.065)       0:01:43.673 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.068)       0:01:43.741 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.069)       0:01:43.811 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "crypt"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.039)       0:01:43.850 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.036)       0:01:43.887 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}
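
The three "Set expected pv type" tasks form a mutually exclusive selection; exactly one branch fires per pool. A sketch of the chain, with the first branch's condition inferred from this encrypted-pool run and 'disk' assumed as the unencrypted value:

- name: Set expected pv type (encrypted pool)
  ansible.builtin.set_fact:
    _storage_test_expected_pv_type: crypt
  when: storage_test_pool.type == 'lvm' and storage_test_pool.encryption

- name: Set expected pv type (plain pool; value assumed)
  ansible.builtin.set_fact:
    _storage_test_expected_pv_type: disk
  when: storage_test_pool.type == 'lvm' and not storage_test_pool.encryption

- name: Set expected pv type (RAID pool)
  ansible.builtin.set_fact:
    _storage_test_expected_pv_type: "{{ storage_test_pool.raid_level }}"
  when: storage_test_pool.type == 'lvm' and storage_test_pool.raid_level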

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.032)       0:01:43.919 **** 
ok: [managed-node3] => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.059)       0:01:43.979 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True
sys:1: DeprecationWarning: builtin type swigvarlink has no __module__ attribute



STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.45.112 originally 10.31.45.112
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.45.112 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.45.112 originally 10.31.45.112
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/eac19bf222'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.45.112 closed.
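
The "True" on stdout comes from a Python probe of blivet; the SSH chatter above is just the verbose connection log. A hedged sketch of such a probe (the attribute actually tested is not shown in this log, so the symbol below is an assumption):

- name: Check that blivet supports PV grow to fill (sketch)
  # 'grow_to_fill' is an assumed attribute name; the real probe may test another symbol
  ansible.builtin.command:
    cmd: python3 -c "import blivet.formats.lvmpv as m; print(hasattr(m.LVMPhysicalVolume, 'grow_to_fill'))"
  register: grow_supported
  changed_when: false

Note that in this run the DeprecationWarning landed on stdout alongside "True", so grow_supported.stdout | trim is a two-line string that no longer equals 'True'; that is why the fill check below is skipped even though the probe itself succeeded.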


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.396)       0:01:44.375 **** 
skipping: [managed-node3] => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "grow_supported.stdout | trim == 'True'",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.047)       0:01:44.423 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.046)       0:01:44.469 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.019)       0:01:44.488 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.018)       0:01:44.507 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.018)       0:01:44.526 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.018)       0:01:44.544 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.019)       0:01:44.564 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Wednesday 20 November 2024  12:06:48 -0500 (0:00:00.016)       0:01:44.581 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.018)       0:01:44.599 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.017)       0:01:44.617 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.018)       0:01:44.635 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.018)       0:01:44.653 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.021)       0:01:44.675 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.037)       0:01:44.712 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.036)       0:01:44.749 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.022)       0:01:44.772 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.022)       0:01:44.794 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.021)       0:01:44.816 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.021)       0:01:44.837 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.022)       0:01:44.859 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.024)       0:01:44.884 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.022)       0:01:44.906 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.038)       0:01:44.945 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.042)       0:01:44.988 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.018)       0:01:45.007 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.018)       0:01:45.025 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.017)       0:01:45.043 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.020)       0:01:45.063 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.043)       0:01:45.106 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.039)       0:01:45.145 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml for managed-node3 => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da)

TASK [Get the backing device path] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:2
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.034)       0:01:45.180 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/disk/by-uuid/ffbfa3bd-c7c7-4b9e-adde-557677df23da"
    ],
    "delta": "0:00:00.003692",
    "end": "2024-11-20 12:06:49.908963",
    "rc": 0,
    "start": "2024-11-20 12:06:49.905271"
}

STDOUT:

/dev/sda
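
The udev-managed /dev/disk/by-uuid symlink for the container UUID points back at the encrypted raw device, so realpath resolves it to /dev/sda. The same lookup as a task, with the UUID hard-coded from this run and an illustrative register name:

- name: Get the backing device path (sketch)
  ansible.builtin.command:
    cmd: realpath /dev/disk/by-uuid/ffbfa3bd-c7c7-4b9e-adde-557677df23da
  register: _storage_test_backing_device
  changed_when: false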

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:12
Wednesday 20 November 2024  12:06:49 -0500 (0:00:00.370)       0:01:45.551 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this member] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:18
Wednesday 20 November 2024  12:06:50 -0500 (0:00:00.674)       0:01:46.225 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cryptsetup",
        "luksDump",
        "/dev/sda"
    ],
    "delta": "0:00:00.007010",
    "end": "2024-11-20 12:06:50.944718",
    "rc": 0,
    "start": "2024-11-20 12:06:50.937708"
}

STDOUT:

LUKS header information for /dev/sda

Version:       	1
Cipher name:   	aes
Cipher mode:   	xts-plain64
Hash spec:     	sha256
Payload offset:	16384
MK bits:       	512
MK digest:     	c1 f9 20 83 c0 f1 02 b3 ec eb 6a 24 3f d7 5f fb bb 2d 00 b7 
MK salt:       	76 86 26 45 4d 57 f3 d6 f4 6c 4c ff ec 58 ca 4d 
               	26 50 3b d1 05 ff c0 b8 71 f8 c7 0d cb 0a 9f e4 
MK iterations: 	105703
UUID:          	ffbfa3bd-c7c7-4b9e-adde-557677df23da

Key Slot 0: ENABLED
	Iterations:         	1688528
	Salt:               	62 de 63 9c 59 48 10 12 80 e1 ed 3c 3a 9d 4a 7c 
	                      	98 ae 8d ca 9f 31 65 57 16 e4 d9 5a 64 bb 59 b1 
	Key material offset:	8
	AF stripes:            	4000
Key Slot 1: DISABLED
Key Slot 2: DISABLED
Key Slot 3: DISABLED
Key Slot 4: DISABLED
Key Slot 5: DISABLED
Key Slot 6: DISABLED
Key Slot 7: DISABLED
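
The three checks that follow assert against fields of this dump: "Version:" for the LUKS version, "MK bits:" for the key size, and "Cipher name:"/"Cipher mode:" for the cipher. A minimal sketch of the version check, assuming the dump was registered as luks_dump:

- name: Check LUKS version (sketch)
  ansible.builtin.assert:
    that:
      - luks_dump.stdout is search('Version:\s+1')
    msg: expected a LUKS1 header on /dev/sda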

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:26
Wednesday 20 November 2024  12:06:50 -0500 (0:00:00.360)       0:01:46.586 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:38
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.047)       0:01:46.634 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:50
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.029)       0:01:46.663 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.045)       0:01:46.708 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node3 => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.040)       0:01:46.749 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [
            "luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da /dev/sda -"
        ]
    },
    "changed": false
}
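
The fact above is the subset of /etc/crypttab lines naming this member's mapper device. A sketch of one way to derive it, assuming the file content was registered earlier as storage_test_crypttab (e.g. from a cat task):

- name: Set variables used by tests (sketch)
  ansible.builtin.set_fact:
    _storage_test_crypttab_entries: "{{ storage_test_crypttab.stdout_lines
      | select('search', '^luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da ')
      | list }}"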

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.044)       0:01:46.793 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.041)       0:01:46.834 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.043)       0:01:46.878 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.036)       0:01:46.915 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.046)       0:01:46.961 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.059)       0:01:47.021 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.020)       0:01:47.042 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.043)       0:01:47.085 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.037)       0:01:47.122 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.019)       0:01:47.141 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.018)       0:01:47.160 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.017)       0:01:47.178 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.018)       0:01:47.196 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.018)       0:01:47.215 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.019)       0:01:47.234 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.019)       0:01:47.253 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.046)       0:01:47.300 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.018)       0:01:47.318 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.019)       0:01:47.338 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.017)       0:01:47.356 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.017)       0:01:47.373 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.017)       0:01:47.390 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.019)       0:01:47.409 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.019)       0:01:47.429 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.034)       0:01:47.464 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}
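
Each entry in _storage_volume_tests expands to a test-verify-volume-<subset>.yml include, as the eight includes below show. A sketch of the driving loop:

- name: Run test verify for {{ storage_test_volume_subset }} (sketch)
  ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset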

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Wednesday 20 November 2024  12:06:51 -0500 (0:00:00.041)       0:01:47.506 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.105)       0:01:47.611 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.028)       0:01:47.639 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.044)       0:01:47.684 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.020)       0:01:47.705 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.024)       0:01:47.729 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.018)       0:01:47.747 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.017)       0:01:47.765 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.018)       0:01:47.784 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.018)       0:01:47.802 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.020)       0:01:47.823 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.017)       0:01:47.841 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.019)       0:01:47.860 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=ca5740d8-1199-4001-9345-528c8e9465b7 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
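
Each *_matches list above holds the substring of an /etc/fstab line matching the volume's mount id, mount point, or mount options, and the expected counts are all 1 for a present volume. A sketch of one derivation, assuming the file content was registered as storage_test_fstab:

- name: Set some variables for fstab checking (sketch)
  ansible.builtin.set_fact:
    storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines
      | map('regex_search', storage_test_volume._mount_id ~ ' ')
      | select('string') | list }}"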

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.057)       0:01:47.917 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.041)       0:01:47.959 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.044)       0:01:48.003 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.045)       0:01:48.048 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.022)       0:01:48.071 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.019)       0:01:48.090 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.043)       0:01:48.134 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.042)       0:01:48.176 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122400.0404687,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1732122400.0404687,
        "dev": 6,
        "device_type": 64769,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5602,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1732122400.0404687,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
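
The stat result (islnk false but mimetype inode/symlink, isblk true) indicates the /dev/mapper symlink was followed to the underlying device-mapper block node. A sketch of the probing task, with the register name assumed:

- name: See whether the device node is present (sketch)
  ansible.builtin.stat:
    path: "{{ storage_test_volume._device }}"
    follow: true
  register: storage_test_dev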

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.368)       0:01:48.545 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Wednesday 20 November 2024  12:06:52 -0500 (0:00:00.039)       0:01:48.584 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.023)       0:01:48.608 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.025)       0:01:48.633 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.024)       0:01:48.657 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.024)       0:01:48.682 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.027)       0:01:48.709 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.020)       0:01:48.730 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.685)       0:01:49.416 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.031)       0:01:49.447 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.029)       0:01:49.477 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.068)       0:01:49.546 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Wednesday 20 November 2024  12:06:53 -0500 (0:00:00.030)       0:01:49.576 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.080)       0:01:49.657 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.033)       0:01:49.691 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.031)       0:01:49.722 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.031)       0:01:49.754 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.057)       0:01:49.812 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
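
For reference, each /etc/crypttab line has the form "name device key-file options"; the expected key file of "-" above is the crypttab convention for "no key file, prompt for the passphrase at unlock time". An illustrative entry (placeholder UUIDs) would look like:

    luks-<uuid>  UUID=<uuid>  -  -

Since this volume is not itself encrypted (only the pool is), the test expects zero matching entries, which is what the check above confirms.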

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.052)       0:01:49.864 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.053)       0:01:49.918 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.039)       0:01:49.957 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.037)       0:01:49.995 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.021)       0:01:50.016 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.018)       0:01:50.034 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.024)       0:01:50.058 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.020)       0:01:50.079 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.017)       0:01:50.097 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.019)       0:01:50.116 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.019)       0:01:50.135 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.021)       0:01:50.156 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.027)       0:01:50.183 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.032)       0:01:50.215 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Wednesday 20 November 2024  12:06:54 -0500 (0:00:00.030)       0:01:50.246 **** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Wednesday 20 November 2024  12:06:55 -0500 (0:00:00.376)       0:01:50.622 **** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
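
Both parses agree: 4 GiB = 4 × 2^30 bytes = 4,294,967,296 bytes, so the "bytes", lvm ("4g"), and parted ("4GiB") renderings above all describe the same size.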

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Wednesday 20 November 2024  12:06:55 -0500 (0:00:00.365)       0:01:50.988 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Wednesday 20 November 2024  12:06:55 -0500 (0:00:00.047)       0:01:51.035 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Wednesday 20 November 2024  12:06:55 -0500 (0:00:00.037)       0:01:51.072 **** 
ok: [managed-node3] => {
    "bytes": 10715943403,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}
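
The parent device reports 10,715,943,403 bytes (about 9.98 GiB); the human-readable lvm/parted forms truncate to whole GiB, so the apparent mismatch between "9 GiB" and the byte count is only a display artifact.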

TASK [Show test pool] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Wednesday 20 November 2024  12:06:55 -0500 (0:00:00.408)       0:01:51.480 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Wednesday 20 November 2024  12:06:55 -0500 (0:00:00.040)       0:01:51.521 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Wednesday 20 November 2024  12:06:55 -0500 (0:00:00.038)       0:01:51.559 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.039)       0:01:51.598 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.038)       0:01:51.636 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.018)       0:01:51.655 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.020)       0:01:51.675 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.018)       0:01:51.694 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.018)       0:01:51.713 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.017)       0:01:51.730 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.018)       0:01:51.748 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.018)       0:01:51.767 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.020)       0:01:51.788 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.017)       0:01:51.806 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.018)       0:01:51.824 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.017)       0:01:51.841 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.018)       0:01:51.860 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.018)       0:01:51.878 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.019)       0:01:51.898 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.018)       0:01:51.917 **** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.021)       0:01:51.939 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.021)       0:01:51.961 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
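
A minimal sketch of what this assertion checks, using the variable names shown in the log (the actual test may allow a small rounding tolerance rather than requiring exact equality):

    - name: Assert expected size is actual size
      ansible.builtin.assert:
        that:
          - storage_test_expected_size | int == storage_test_actual_size.bytes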

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.043)       0:01:52.004 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.023807",
    "end": "2024-11-20 12:06:56.749154",
    "rc": 0,
    "start": "2024-11-20 12:06:56.725347"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
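
The same LV query can be reproduced directly on the managed node (command taken verbatim from the task result above); LVM2_SEGTYPE=linear in the output confirms the volume is a plain linear LV rather than a cached one:

    lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted \
        -o name,attr,cache_total_blocks,chunk_size,segtype foo/test1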

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.386)       0:01:52.391 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.048)       0:01:52.440 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.044)       0:01:52.484 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Wednesday 20 November 2024  12:06:56 -0500 (0:00:00.056)       0:01:52.541 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.060)       0:01:52.601 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.059)       0:01:52.661 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.056)       0:01:52.718 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.026)       0:01:52.744 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.022)       0:01:52.767 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Create a file] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/create-test-file.yml:12
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.021)       0:01:52.789 **** 
changed: [managed-node3] => {
    "changed": true,
    "dest": "/opt/test1/quux",
    "gid": 0,
    "group": "root",
    "mode": "0644",
    "owner": "root",
    "secontext": "unconfined_u:object_r:unlabeled_t:s0",
    "size": 0,
    "state": "file",
    "uid": 0
}

TASK [Change the mountpoint, leaving encryption in place] **********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:237
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.366)       0:01:53.155 **** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.056)       0:01:53.211 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.027)       0:01:53.239 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.040)       0:01:53.280 **** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
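
CentOS_10.yml is loaded twice here, likely because two candidates in the role's vars-file search order (the major-version and full-version patterns, which both resolve to "10" on this system) point at the same file; the second inclusion simply re-sets the same facts and is harmless.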

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.049)       0:01:53.330 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.057)       0:01:53.388 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.024)       0:01:53.412 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.021)       0:01:53.433 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.019)       0:01:53.453 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.047)       0:01:53.500 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.023)       0:01:53.524 **** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "mount_point": "/opt/test2",
                    "name": "test1"
                }
            ]
        }
    ]
}
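
A minimal sketch of the play step that would produce this storage_pools value, assuming the test drives the role via include_role; the pool's encryption settings are deliberately omitted so the role preserves the existing LUKS configuration, which is what this part of the test verifies:

    - name: Change the mountpoint, leaving encryption in place
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            volumes:
              - name: test1
                mount_point: /opt/test2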

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.024)       0:01:53.548 **** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 20 November 2024  12:06:57 -0500 (0:00:00.022)       0:01:53.571 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Wednesday 20 November 2024  12:06:58 -0500 (0:00:00.024)       0:01:53.595 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Wednesday 20 November 2024  12:06:58 -0500 (0:00:00.026)       0:01:53.621 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Wednesday 20 November 2024  12:06:58 -0500 (0:00:00.028)       0:01:53.650 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Wednesday 20 November 2024  12:06:58 -0500 (0:00:00.034)       0:01:53.684 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Wednesday 20 November 2024  12:06:58 -0500 (0:00:00.083)       0:01:53.768 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Wednesday 20 November 2024  12:06:58 -0500 (0:00:00.030)       0:01:53.798 **** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "path": "/opt/test1",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test2",
            "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
            "state": "mounted"
        }
    ],
    "packages": [
        "lvm2",
        "cryptsetup",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks1",
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 4294967296,
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
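
Note how to read this result: the module-level "changed" is false and "actions" is empty because no block devices had to be created, resized, or reformatted; the mount-point change is carried entirely in the "mounts" list (unmount /opt/test1, mount /opt/test2 by UUID), which the follow-up mount tasks below apply.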

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Wednesday 20 November 2024  12:07:00 -0500 (0:00:02.070)       0:01:55.869 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Wednesday 20 November 2024  12:07:00 -0500 (0:00:00.058)       0:01:55.927 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122402.769469,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "36ec179b5b68ac3c198db721589f589022e01ee1",
        "ctime": 1732122402.765469,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 297795793,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1732122402.765469,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "2100948536",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Wednesday 20 November 2024  12:07:00 -0500 (0:00:00.375)       0:01:56.303 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Wednesday 20 November 2024  12:07:00 -0500 (0:00:00.039)       0:01:56.343 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Wednesday 20 November 2024  12:07:00 -0500 (0:00:00.028)       0:01:56.371 **** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "path": "/opt/test1",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test2",
                "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                "state": "mounted"
            }
        ],
        "packages": [
            "lvm2",
            "cryptsetup",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks1",
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 4294967296,
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Wednesday 20 November 2024  12:07:00 -0500 (0:00:00.039)       0:01:56.411 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks1",
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-1",
                        "_mount_id": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 4294967296,
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Wednesday 20 November 2024  12:07:00 -0500 (0:00:00.039)       0:01:56.450 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Wednesday 20 November 2024  12:07:00 -0500 (0:00:00.037)       0:01:56.487 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'path': '/opt/test1', 'state': 'absent'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "mount_info": {
        "path": "/opt/test1",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Wednesday 20 November 2024  12:07:01 -0500 (0:00:00.460)       0:01:56.947 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Wednesday 20 November 2024  12:07:02 -0500 (0:00:00.793)       0:01:57.741 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
        "state": "mounted"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7"
}
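
Per item, the two mount tasks above amount to calls like the following (a sketch; the role actually loops over the entries in blivet_output.mounts rather than hard-coding paths):

    - name: Remove obsolete mounts
      ansible.posix.mount:
        path: /opt/test1
        state: absent

    - name: Set up new/current mounts
      ansible.posix.mount:
        src: UUID=ca5740d8-1199-4001-9345-528c8e9465b7
        path: /opt/test2
        fstype: xfs
        opts: defaults
        state: mounted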

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Wednesday 20 November 2024  12:07:02 -0500 (0:00:00.468)       0:01:58.209 **** 
skipping: [managed-node3] => (item={'src': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Wednesday 20 November 2024  12:07:02 -0500 (0:00:00.074)       0:01:58.284 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Wednesday 20 November 2024  12:07:03 -0500 (0:00:00.784)       0:01:59.068 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122406.7204695,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "8d087bcc23525dee9e0fb6bcb9d4be652a38264f",
        "ctime": 1732122404.3934693,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 205521109,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0600",
        "mtime": 1732122404.3940144,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 53,
        "uid": 0,
        "version": "465084235",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Wednesday 20 November 2024  12:07:03 -0500 (0:00:00.422)       0:01:59.491 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Wednesday 20 November 2024  12:07:03 -0500 (0:00:00.029)       0:01:59.521 **** 
ok: [managed-node3]

TASK [Assert to implicitly preserve encryption on existing pool] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:249
Wednesday 20 November 2024  12:07:04 -0500 (0:00:00.985)       0:02:00.506 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
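
One plausible shape of this assertion, assuming it inspects the pool state the role registered in blivet_output (shown earlier, with "encryption": true despite the pool spec omitting it); the exact conditions in the test file are not visible in this log:

    - name: Assert to implicitly preserve encryption on existing pool
      ansible.builtin.assert:
        that:
          - blivet_output.pools[0].encryption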

TASK [Stat the file] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-data-preservation.yml:11
Wednesday 20 November 2024  12:07:04 -0500 (0:00:00.041)       0:02:00.548 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122417.511471,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1732122417.511471,
        "dev": 64769,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 131,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0644",
        "mtime": 1732122417.511471,
        "nlink": 1,
        "path": "/opt/test2/quux",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "4148762174",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Assert file presence] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-data-preservation.yml:16
Wednesday 20 November 2024  12:07:05 -0500 (0:00:00.404)       0:02:00.953 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify role results] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:259
Wednesday 20 November 2024  12:07:05 -0500 (0:00:00.043)       0:02:00.996 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Wednesday 20 November 2024  12:07:05 -0500 (0:00:00.072)       0:02:01.069 **** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks1",
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-1",
                    "_mount_id": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 4294967296,
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Wednesday 20 November 2024  12:07:05 -0500 (0:00:00.078)       0:02:01.148 **** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Wednesday 20 November 2024  12:07:05 -0500 (0:00:00.062)       0:02:01.210 **** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test2",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "ca5740d8-1199-4001-9345-528c8e9465b7"
        },
        "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "size": "10G",
            "type": "crypt",
            "uuid": "gKLeaO-F2W1-m4c2-WqPE-nFGU-o2Gd-MS93lX"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "ffbfa3bd-c7c7-4b9e-adde-557677df23da"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "63814bf2-dbd4-439c-b63b-6d05ca07d081"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Wednesday 20 November 2024  12:07:06 -0500 (0:00:00.411)       0:02:01.621 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003075",
    "end": "2024-11-20 12:07:06.356728",
    "rc": 0,
    "start": "2024-11-20 12:07:06.353653"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Nov 12 09:07:12 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=63814bf2-dbd4-439c-b63b-6d05ca07d081 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=ca5740d8-1199-4001-9345-528c8e9465b7 /opt/test2 xfs defaults 0 0
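
The fstab verification further down (test-verify-volume-fstab.yml) only needs to find the volume's _mount_id (the UUID= spec from the role output) among these entries; the "# system_role:storage" marker at the top is how the role tags fstab content it manages. One way to reproduce the lookup by hand, reusing the UUID printed above (a sketch, not the test's own Jinja logic):

    # Expect exactly one managed entry for the test volume at /opt/test2
    grep -c '^UUID=ca5740d8-1199-4001-9345-528c8e9465b7 /opt/test2 ' /etc/fstab   # -> 1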

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Wednesday 20 November 2024  12:07:06 -0500 (0:00:00.391)       0:02:02.013 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003521",
    "end": "2024-11-20 12:07:06.737977",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-20 12:07:06.734456"
}

STDOUT:

luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da /dev/sda -
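
A crypttab line is <name> <backing device> <key file> [options]; the trailing "-" means no key file is configured, so the passphrase is prompted at unlock time (matching the expected crypttab key file of "-" that the member-encryption checks set further down). A quick shape check of the entry (a sketch):

    # Fields: mapped name, backing device, key file; '-' (or 'none') = prompt for passphrase
    awk '{print NF, $3}' /etc/crypttab   # -> 3 -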

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Wednesday 20 November 2024  12:07:06 -0500 (0:00:00.367)       0:02:02.381 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks1', 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Wednesday 20 November 2024  12:07:06 -0500 (0:00:00.085)       0:02:02.466 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Wednesday 20 November 2024  12:07:06 -0500 (0:00:00.034)       0:02:02.501 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.028465",
    "end": "2024-11-20 12:07:07.266252",
    "rc": 0,
    "start": "2024-11-20 12:07:07.237787"
}

STDOUT:

        0
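
With --binary, vgs reports the shared attribute as 0/1 rather than an empty or "shared" string, which keeps the following assertion a plain string comparison. The same probe by hand (foo is the pool's VG name):

    # Prints 0 for a non-shared VG, 1 for a shared one; --noheadings drops the header row
    vgs --noheadings --binary -o shared foo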

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Wednesday 20 November 2024  12:07:07 -0500 (0:00:00.417)       0:02:02.918 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Wednesday 20 November 2024  12:07:07 -0500 (0:00:00.028)       0:02:02.947 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Wednesday 20 November 2024  12:07:07 -0500 (0:00:00.055)       0:02:03.003 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Wednesday 20 November 2024  12:07:07 -0500 (0:00:00.052)       0:02:03.055 **** 
ok: [managed-node3] => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
    "pv": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Wednesday 20 November 2024  12:07:07 -0500 (0:00:00.354)       0:02:03.410 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Wednesday 20 November 2024  12:07:07 -0500 (0:00:00.040)       0:02:03.450 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Wednesday 20 November 2024  12:07:07 -0500 (0:00:00.048)       0:02:03.498 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Wednesday 20 November 2024  12:07:07 -0500 (0:00:00.044)       0:02:03.542 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "crypt"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Wednesday 20 November 2024  12:07:07 -0500 (0:00:00.024)       0:02:03.567 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.023)       0:02:03.591 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.019)       0:02:03.611 **** 
ok: [managed-node3] => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.039)       0:02:03.650 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0
}

STDOUT:

True
sys:1: DeprecationWarning: builtin type swigvarlink has no __module__ attribute

TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:73
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.410)       0:02:04.061 **** 
skipping: [managed-node3] => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "grow_supported.stdout | trim == 'True'",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped
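
Worth noting why this was skipped even though the probe printed True: the swig deprecation warning landed on stdout alongside it, so grow_supported.stdout | trim is a two-line string rather than the bare "True" the condition compares against. A minimal shell illustration of that mismatch (the string mirrors the stdout above):

    # trim strips surrounding whitespace but not the embedded newline, so the
    # exact comparison fails; only the first line equals "True"
    s=$'True\nsys:1: DeprecationWarning: builtin type swigvarlink has no __module__ attribute'
    [ "$s" = "True" ] && echo match || echo no-match   # -> no-match
    printf '%s\n' "$s" | head -n1                      # -> True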

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:83
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.181)       0:02:04.242 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.070)       0:02:04.313 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.035)       0:02:04.348 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.029)       0:02:04.377 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.031)       0:02:04.409 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.030)       0:02:04.440 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.029)       0:02:04.469 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.031)       0:02:04.501 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.029)       0:02:04.530 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Wednesday 20 November 2024  12:07:08 -0500 (0:00:00.029)       0:02:04.559 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.032)       0:02:04.592 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.030)       0:02:04.622 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:86
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.032)       0:02:04.655 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.065)       0:02:04.721 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.063)       0:02:04.784 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.037)       0:02:04.821 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.038)       0:02:04.860 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.036)       0:02:04.896 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.040)       0:02:04.937 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.037)       0:02:04.974 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.037)       0:02:05.012 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:89
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.037)       0:02:05.050 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.067)       0:02:05.117 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.061)       0:02:05.178 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.031)       0:02:05.209 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.026)       0:02:05.236 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.030)       0:02:05.267 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:92
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.040)       0:02:05.308 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.077)       0:02:05.385 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.065)       0:02:05.450 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml for managed-node3 => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da)

TASK [Get the backing device path] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:2
Wednesday 20 November 2024  12:07:09 -0500 (0:00:00.060)       0:02:05.511 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "realpath",
        "/dev/disk/by-uuid/ffbfa3bd-c7c7-4b9e-adde-557677df23da"
    ],
    "delta": "0:00:00.003037",
    "end": "2024-11-20 12:07:10.280700",
    "rc": 0,
    "start": "2024-11-20 12:07:10.277663"
}

STDOUT:

/dev/sda
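
Entries under /dev/disk/by-uuid are udev-managed symlinks, so realpath resolves the LUKS UUID (reported for /dev/sda in the device info collected earlier) straight to the raw device that holds the header:

    # Resolve the udev by-uuid symlink to the backing device node
    realpath /dev/disk/by-uuid/ffbfa3bd-c7c7-4b9e-adde-557677df23da   # -> /dev/sda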

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:12
Wednesday 20 November 2024  12:07:10 -0500 (0:00:00.422)       0:02:05.934 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this member] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:18
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.721)       0:02:06.655 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cryptsetup",
        "luksDump",
        "/dev/sda"
    ],
    "delta": "0:00:00.007289",
    "end": "2024-11-20 12:07:11.402007",
    "rc": 0,
    "start": "2024-11-20 12:07:11.394718"
}

STDOUT:

LUKS header information for /dev/sda

Version:       	1
Cipher name:   	aes
Cipher mode:   	xts-plain64
Hash spec:     	sha256
Payload offset:	16384
MK bits:       	512
MK digest:     	c1 f9 20 83 c0 f1 02 b3 ec eb 6a 24 3f d7 5f fb bb 2d 00 b7 
MK salt:       	76 86 26 45 4d 57 f3 d6 f4 6c 4c ff ec 58 ca 4d 
               	26 50 3b d1 05 ff c0 b8 71 f8 c7 0d cb 0a 9f e4 
MK iterations: 	105703
UUID:          	ffbfa3bd-c7c7-4b9e-adde-557677df23da

Key Slot 0: ENABLED
	Iterations:         	1688528
	Salt:               	62 de 63 9c 59 48 10 12 80 e1 ed 3c 3a 9d 4a 7c 
	                      	98 ae 8d ca 9f 31 65 57 16 e4 d9 5a 64 bb 59 b1 
	Key material offset:	8
	AF stripes:            	4000
Key Slot 1: DISABLED
Key Slot 2: DISABLED
Key Slot 3: DISABLED
Key Slot 4: DISABLED
Key Slot 5: DISABLED
Key Slot 6: DISABLED
Key Slot 7: DISABLED
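
The version assertion that follows only needs the Version field from this dump; a luks1 header reports 1, while a LUKS2 header would report 2 with a different layout. Pulling just that field by hand (a sketch):

    # Extract the header version the 'Check LUKS version' assertion keys off
    cryptsetup luksDump /dev/sda | awk '/^Version:/ {print $2}'   # -> 1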

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:26
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.401)       0:02:07.057 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:38
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.074)       0:02:07.131 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption_key_size",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-encryption.yml:50
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.035)       0:02:07.167 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption_cipher",
    "skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.036)       0:02:07.203 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node3 => (item=/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.063)       0:02:07.267 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [
            "luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da /dev/sda -"
        ]
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.079)       0:02:07.347 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.064)       0:02:07.412 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.081)       0:02:07.493 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Wednesday 20 November 2024  12:07:11 -0500 (0:00:00.061)       0:02:07.555 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.072)       0:02:07.628 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.028)       0:02:07.657 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:95
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.021)       0:02:07.678 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.044)       0:02:07.723 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.040)       0:02:07.763 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.019)       0:02:07.782 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.018)       0:02:07.801 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.057)       0:02:07.859 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.021)       0:02:07.880 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.020)       0:02:07.901 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.019)       0:02:07.920 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:98
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.020)       0:02:07.941 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.048)       0:02:07.989 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.019)       0:02:08.009 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.018)       0:02:08.027 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.017)       0:02:08.045 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.017)       0:02:08.063 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.020)       0:02:08.084 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:101
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.019)       0:02:08.103 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.019)       0:02:08.123 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 4294967296, 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', '_kernel_device': '/dev/dm-1', '_raw_kernel_device': '/dev/dm-1'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.032)       0:02:08.155 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.047)       0:02:08.203 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
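
Each entry in _storage_volume_tests maps to one include above. A sketch of the loop that produces them, reconstructed from the rendered task title and the included file names:

    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset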

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.104)       0:02:08.307 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}
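
For an LVM volume the expected mount device is the device-mapper path /dev/mapper/<pool>-<volume>. A sketch of how that fact could be derived; storage_test_pool is an assumed variable name here, and the test file itself may build the path differently:

    - name: Get expected mount device based on device type (sketch for the lvm case)
      set_fact:
        storage_test_device_path: "/dev/mapper/{{ storage_test_pool.name }}-{{ storage_test_volume.name }}"
      when: storage_test_volume.type == "lvm"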

TASK [Set some facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.029)       0:02:08.337 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test2",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.049)       0:02:08.386 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.019)       0:02:08.406 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.038)       0:02:08.444 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.021)       0:02:08.465 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.023)       0:02:08.489 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.020)       0:02:08.510 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.019)       0:02:08.529 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.022)       0:02:08.551 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Wednesday 20 November 2024  12:07:12 -0500 (0:00:00.020)       0:02:08.572 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.021)       0:02:08.594 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=ca5740d8-1199-4001-9345-528c8e9465b7 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test2 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test2 "
        ]
    },
    "changed": false
}
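
The three *_matches lists hold the /etc/fstab fragments that matched the volume's device identifier, mount point, and mount options; the expected counts are all "1" because exactly one fstab line should reference the volume. A sketch of how the identifier matches might be collected and checked; storage_test_fstab is assumed to hold the registered fstab content, as the cleanup task later in this run suggests:

    - name: Collect fstab lines referencing the volume (sketch)
      set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines | select('search', storage_test_volume._mount_id) | list }}"

    - name: Verify that the device identifier appears in /etc/fstab (sketch)
      assert:
        that: storage_test_fstab_id_matches | length == storage_test_fstab_expected_id_matches | int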

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.066)       0:02:08.661 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.046)       0:02:08.707 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.049)       0:02:08.756 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.054)       0:02:08.810 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.029)       0:02:08.840 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.019)       0:02:08.859 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.051)       0:02:08.911 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.045)       0:02:08.956 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122400.0404687,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1732122400.0404687,
        "dev": 6,
        "device_type": 64769,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 5602,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1732122400.0404687,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
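
The stat result above feeds the presence check that follows: the node must exist and be a block device whenever the volume is expected to be present. A minimal sketch of that assertion; storage_test_dev is an assumed register name:

    - name: Verify the presence/absence of the device node (sketch)
      assert:
        that:
          - storage_test_dev.stat.exists
          - storage_test_dev.stat.isblk
      when: _storage_test_volume_present | bool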

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.362)       0:02:09.318 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.025)       0:02:09.344 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.020)       0:02:09.365 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.029)       0:02:09.394 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.024)       0:02:09.419 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.019)       0:02:09.438 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.024)       0:02:09.463 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Wednesday 20 November 2024  12:07:13 -0500 (0:00:00.018)       0:02:09.481 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.668)       0:02:10.150 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.018)       0:02:10.169 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.020)       0:02:10.190 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.042)       0:02:10.233 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.018)       0:02:10.251 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.018)       0:02:10.270 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.018)       0:02:10.288 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.018)       0:02:10.306 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.019)       0:02:10.326 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}
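
Because this volume is unencrypted, the expected crypttab entry count is "0" and the matched list is empty; the check that follows simply compares the two. A minimal sketch of that assertion:

    - name: Check for /etc/crypttab entry (sketch)
      assert:
        that: _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int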

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.046)       0:02:10.373 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.046)       0:02:10.419 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.037)       0:02:10.457 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.037)       0:02:10.495 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Wednesday 20 November 2024  12:07:14 -0500 (0:00:00.040)       0:02:10.535 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.058)       0:02:10.594 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.021)       0:02:10.615 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.018)       0:02:10.634 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.019)       0:02:10.653 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.018)       0:02:10.672 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.019)       0:02:10.691 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.022)       0:02:10.714 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.018)       0:02:10.732 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.019)       0:02:10.751 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.018)       0:02:10.770 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.018)       0:02:10.788 **** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.376)       0:02:11.165 **** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
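
Both parses agree because the requested size is an exact binary multiple: 4 * 1024^3 = 4294967296 bytes. Ansible's built-in human_to_bytes filter performs the same conversion; an illustrative one-liner, not the helper the test itself uses:

    - name: Convert a human-readable size to bytes (illustrative)
      debug:
        msg: "{{ '4G' | human_to_bytes }}"  # 4294967296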

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Wednesday 20 November 2024  12:07:15 -0500 (0:00:00.380)       0:02:11.545 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.044)       0:02:11.590 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.022)       0:02:11.612 **** 
ok: [managed-node3] => {
    "bytes": 10715943403,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.370)       0:02:11.983 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.040)       0:02:12.023 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.043)       0:02:12.067 **** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.045)       0:02:12.112 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.040)       0:02:12.153 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.018)       0:02:12.172 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.019)       0:02:12.192 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.018)       0:02:12.210 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.019)       0:02:12.229 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.021)       0:02:12.251 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.017)       0:02:12.269 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.018)       0:02:12.287 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.017)       0:02:12.305 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.018)       0:02:12.324 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.017)       0:02:12.341 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.020)       0:02:12.361 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.018)       0:02:12.380 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.018)       0:02:12.398 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.023)       0:02:12.422 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.018)       0:02:12.441 **** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.022)       0:02:12.463 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.024)       0:02:12.488 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Wednesday 20 November 2024  12:07:16 -0500 (0:00:00.046)       0:02:12.534 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.032901",
    "end": "2024-11-20 12:07:17.281761",
    "rc": 0,
    "start": "2024-11-20 12:07:17.248860"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
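
With --nameprefixes and --noheadings, lvs emits one line of KEY=VALUE pairs per LV, which is easy to pick apart with a regex. A sketch of extracting the segment type the way the next task's result suggests; storage_test_lvs is an assumed register name:

    - name: Set LV segment type (sketch)
      set_fact:
        storage_test_lv_segtype: "{{ storage_test_lvs.stdout_lines | map('regex_search', 'LVM2_SEGTYPE=(\\S+)', '\\1') | flatten }}"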

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.390)       0:02:12.925 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.042)       0:02:12.967 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.042)       0:02:13.009 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.042)       0:02:13.052 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.043)       0:02:13.095 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.038)       0:02:13.134 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.037)       0:02:13.171 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.019)       0:02:13.191 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.016)       0:02:13.207 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:262
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.019)       0:02:13.227 **** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.061)       0:02:13.288 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.029)       0:02:13.317 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.040)       0:02:13.358 **** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node3] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
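
CentOS_10.yml is loaded twice because the candidate list contains both <distribution>_<major version>.yml and <distribution>_<version>.yml, and on this host both render to the same filename; RedHat.yml and CentOS.yml are skipped because no such files exist at the resolved path. A sketch of the first-found include loop this implies; the exact task in the role templates the filenames rather than listing them literally:

    - name: Set platform/version specific variables (sketch)
      include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml
        - CentOS.yml
        - CentOS_10.yml
        - CentOS_10.yml
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file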

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.049)       0:02:13.407 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.029)       0:02:13.437 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.022)       0:02:13.460 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.019)       0:02:13.480 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.022)       0:02:13.503 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.047)       0:02:13.550 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Wednesday 20 November 2024  12:07:17 -0500 (0:00:00.025)       0:02:13.576 **** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Wednesday 20 November 2024  12:07:18 -0500 (0:00:00.024)       0:02:13.601 **** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "state": "absent",
            "type": "disk"
        }
    ]
}
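
Cleanup is just another role invocation with the whole disk marked absent. A sketch of the play section behind this Clean up task, grounded in the storage_volumes value shown above:

    - name: Clean up (sketch)
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_volumes:
          - name: foo
            type: disk
            disks: [sda]
            state: absent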

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Wednesday 20 November 2024  12:07:18 -0500 (0:00:00.066)       0:02:13.668 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Wednesday 20 November 2024  12:07:18 -0500 (0:00:00.026)       0:02:13.694 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Wednesday 20 November 2024  12:07:18 -0500 (0:00:00.025)       0:02:13.719 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Wednesday 20 November 2024  12:07:18 -0500 (0:00:00.025)       0:02:13.745 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Wednesday 20 November 2024  12:07:18 -0500 (0:00:00.025)       0:02:13.770 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Wednesday 20 November 2024  12:07:18 -0500 (0:00:00.057)       0:02:13.828 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Wednesday 20 November 2024  12:07:18 -0500 (0:00:00.022)       0:02:13.850 **** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "fs_type": "lvmpv"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "luks"
        }
    ],
    "changed": true,
    "crypts": [
        {
            "backing_device": "/dev/sda",
            "name": "luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "password": "-",
            "state": "absent"
        }
    ],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test2",
            "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
            "state": "absent"
        }
    ],
    "packages": [
        "xfsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "_mount_id": "UUID=gKLeaO-F2W1-m4c2-WqPE-nFGU-o2Gd-MS93lX",
            "_raw_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks1",
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "lvmpv",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10729029632,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}
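
Note the teardown order in the actions list: blivet works from the top of the stack down, destroying the xfs format, then the LV, then the VG, then the lvmpv format and the LUKS mapping it sat on, and finally the LUKS format on /dev/sda itself. The returned mounts and crypts lists tell the role which fstab and crypttab entries to retire; a sketch of consuming the mounts list, assuming the result was registered as blivet_output as the later debug task shows (the role's actual task may differ):

    - name: Retire mounts reported by blivet (sketch)
      ansible.posix.mount:
        src: "{{ item.src }}"
        path: "{{ item.path }}"
        fstype: "{{ item.fstype }}"
        state: "{{ item.state }}"
      loop: "{{ blivet_output.mounts }}"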

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Wednesday 20 November 2024  12:07:20 -0500 (0:00:02.628)       0:02:16.479 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Wednesday 20 November 2024  12:07:20 -0500 (0:00:00.038)       0:02:16.517 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122422.5384715,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "d366c971f1b27065dbe3c19eeee61fc58d9f832e",
        "ctime": 1732122422.5344715,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 297795793,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1732122422.5344715,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "2100948536",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Wednesday 20 November 2024  12:07:21 -0500 (0:00:00.362)       0:02:16.880 **** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Wednesday 20 November 2024  12:07:21 -0500 (0:00:00.364)       0:02:17.244 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Wednesday 20 November 2024  12:07:21 -0500 (0:00:00.021)       0:02:17.266 **** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
                "fs_type": "lvmpv"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "luks"
            }
        ],
        "changed": true,
        "crypts": [
            {
                "backing_device": "/dev/sda",
                "name": "luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
                "password": "-",
                "state": "absent"
            }
        ],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test2",
                "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
                "state": "absent"
            }
        ],
        "packages": [
            "xfsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
                "_mount_id": "UUID=gKLeaO-F2W1-m4c2-WqPE-nFGU-o2Gd-MS93lX",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks1",
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "lvmpv",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10729029632,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}
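
The teardown above is blivet working leaf-to-root: destroy the xfs format on the LV, the LV itself, the VG, the LVM PV format on the LUKS mapping, the mapping, and finally the LUKS format on /dev/sda. A rough sketch of the kind of role invocation that requests this (parameter names are the role's documented variables; the exact values used here live in tests_luks_pool.yml, so treat the specifics as illustrative):

    - hosts: all
      roles:
        - role: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo                    # VG seen above as /dev/foo
                type: lvm
                disks: [sda]
                encryption: true             # LUKS under the pool
                encryption_luks_version: luks1
                state: absent                # request full teardown
                volumes:
                  - name: test1              # LV seen above as foo-test1
                    mount_point: /opt/test2
                    state: absent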

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Wednesday 20 November 2024  12:07:21 -0500 (0:00:00.026)       0:02:17.292 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Wednesday 20 November 2024  12:07:21 -0500 (0:00:00.023)       0:02:17.315 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
                "_mount_id": "UUID=gKLeaO-F2W1-m4c2-WqPE-nFGU-o2Gd-MS93lX",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": 0,
                "encryption_luks_version": "luks1",
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "lvmpv",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10729029632,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Wednesday 20 November 2024  12:07:21 -0500 (0:00:00.023)       0:02:17.339 **** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=ca5740d8-1199-4001-9345-528c8e9465b7', 'path': '/opt/test2', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test2",
        "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7",
        "state": "absent"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=ca5740d8-1199-4001-9345-528c8e9465b7"
}
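
This step feeds each entry of blivet_output.mounts to the mount module (redirected to ansible.posix.mount, as logged above). A sketch of the equivalent task, assuming the loop variable name shown in the result (ansible_loop_var: mount_info):

    - name: Remove obsolete mounts
      ansible.posix.mount:
        src: "{{ mount_info.src }}"
        path: "{{ mount_info.path }}"
        fstype: "{{ mount_info.fstype }}"
        state: absent                 # unmount and drop the fstab line
      loop: "{{ blivet_output.mounts }}"
      loop_control:
        loop_var: mount_info

With state: absent, ansible.posix.mount both unmounts the path and deletes the matching /etc/fstab entry, which is why the result reports fstab: /etc/fstab and changed: true.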

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Wednesday 20 November 2024  12:07:22 -0500 (0:00:00.391)       0:02:17.731 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Wednesday 20 November 2024  12:07:22 -0500 (0:00:00.735)       0:02:18.466 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Wednesday 20 November 2024  12:07:22 -0500 (0:00:00.054)       0:02:18.520 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Wednesday 20 November 2024  12:07:22 -0500 (0:00:00.040)       0:02:18.561 **** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Wednesday 20 November 2024  12:07:23 -0500 (0:00:00.744)       0:02:19.306 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122406.7204695,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "8d087bcc23525dee9e0fb6bcb9d4be652a38264f",
        "ctime": 1732122404.3934693,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 205521109,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0600",
        "mtime": 1732122404.3940144,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 53,
        "uid": 0,
        "version": "465084235",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Wednesday 20 November 2024  12:07:24 -0500 (0:00:00.358)       0:02:19.664 **** 
changed: [managed-node3] => (item={'backing_device': '/dev/sda', 'name': 'luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da', 'password': '-', 'state': 'absent'}) => {
    "ansible_loop_var": "entry",
    "backup": "",
    "changed": true,
    "entry": {
        "backing_device": "/dev/sda",
        "name": "luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
        "password": "-",
        "state": "absent"
    },
    "found": 1
}

MSG:

1 line(s) removed
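
The crypts list from blivet_output drives this step: each entry names a LUKS mapping and its backing device, and state: absent means the corresponding /etc/crypttab line (crypttab(5) format: name, backing device, key file, options; here roughly "luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da /dev/sda -") has to go. The found: 1 / "1 line(s) removed" result matches what ansible.builtin.lineinfile returns for a regexp match with state: absent; a hedged sketch of such a task for the removal case:

    - name: Manage /etc/crypttab to account for changes we just made
      ansible.builtin.lineinfile:
        path: /etc/crypttab
        regexp: '^{{ entry.name }}\s+'   # match the mapping by name
        state: absent                    # delete every matching line
      loop: "{{ blivet_output.crypts }}"
      loop_control:
        loop_var: entry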

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Wednesday 20 November 2024  12:07:24 -0500 (0:00:00.384)       0:02:20.049 **** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:272
Wednesday 20 November 2024  12:07:25 -0500 (0:00:00.908)       0:02:20.957 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Wednesday 20 November 2024  12:07:25 -0500 (0:00:00.044)       0:02:21.002 **** 
skipping: [managed-node3] => {
    "false_condition": "_storage_pools_list | length > 0"
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Wednesday 20 November 2024  12:07:25 -0500 (0:00:00.039)       0:02:21.042 **** 
ok: [managed-node3] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da",
            "_mount_id": "UUID=gKLeaO-F2W1-m4c2-WqPE-nFGU-o2Gd-MS93lX",
            "_raw_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": 0,
            "encryption_luks_version": "luks1",
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "lvmpv",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10729029632,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Wednesday 20 November 2024  12:07:25 -0500 (0:00:00.046)       0:02:21.088 **** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "63814bf2-dbd4-439c-b63b-6d05ca07d081"
        }
    }
}
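
The structure here is lsblk-shaped: one entry per block device keyed on path, carrying the name, size, type, fstype, label, mountpoint, and uuid columns. All nine test disks are bare again (the empty fstype/uuid on /dev/sda confirms the LUKS format was destroyed), and only the OS partition /dev/xvda2 carries a filesystem. A stand-in collection task, assuming lsblk is the underlying tool (the test's actual helper may differ):

    - name: Collect info about the volumes.
      ansible.builtin.command: lsblk -o NAME,SIZE,TYPE,FSTYPE,LABEL,MOUNTPOINT,UUID --pairs
      register: storage_volume_info    # hypothetical register name
      changed_when: false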

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Wednesday 20 November 2024  12:07:25 -0500 (0:00:00.357)       0:02:21.446 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003613",
    "end": "2024-11-20 12:07:26.161983",
    "rc": 0,
    "start": "2024-11-20 12:07:26.158370"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Nov 12 09:07:12 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=63814bf2-dbd4-439c-b63b-6d05ca07d081 /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
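
Two things matter in this dump for the verification that follows: the role's "# system_role:storage" fingerprint is still present at the top, and neither the removed source UUID=ca5740d8-1199-4001-9345-528c8e9465b7 nor the /opt/test2 mount point appears anywhere. A hypothetical assertion in the spirit of the fstab checks below (register name assumed):

    - name: Read the /etc/fstab file for volume existence
      ansible.builtin.command: cat /etc/fstab
      register: storage_test_fstab
      changed_when: false

    - name: Assert the removed volume left no fstab entry
      ansible.builtin.assert:
        that:
          - "'/opt/test2' not in storage_test_fstab.stdout"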

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.357)       0:02:21.803 **** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003011",
    "end": "2024-11-20 12:07:26.510119",
    "failed_when_result": false,
    "rc": 0,
    "start": "2024-11-20 12:07:26.507108"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.347)       0:02:22.151 **** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.017)       0:02:22.168 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': 0, 'encryption_luks_version': 'luks1', 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'lvmpv', 'mount_options': 'defaults', 'mount_point': None, 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'foo', 'raid_level': None, 'size': 10729029632, 'state': 'absent', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da', '_raw_device': '/dev/sda', '_mount_id': 'UUID=gKLeaO-F2W1-m4c2-WqPE-nFGU-o2Gd-MS93lX'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.059)       0:02:22.228 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.040)       0:02:22.268 **** 
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
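
Each entry in _storage_volume_tests maps to a test-verify-volume-<subset>.yml file, included once per subset, which is why the eight includes above line up exactly with the list set in the previous task. A sketch of the dispatch loop, assuming the loop variable named in the task title:

    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset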

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.100)       0:02:22.369 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/luks-ffbfa3bd-c7c7-4b9e-adde-557677df23da"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.029)       0:02:22.398 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.042)       0:02:22.441 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.018)       0:02:22.460 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.021)       0:02:22.481 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.019)       0:02:22.500 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.018)       0:02:22.519 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.024)       0:02:22.543 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.018)       0:02:22.561 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Wednesday 20 November 2024  12:07:26 -0500 (0:00:00.017)       0:02:22.579 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.020)       0:02:22.599 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.021)       0:02:22.620 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.059)       0:02:22.680 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.036)       0:02:22.717 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.041)       0:02:22.758 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.038)       0:02:22.796 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:51
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.025)       0:02:22.822 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.021)       0:02:22.843 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.020)       0:02:22.864 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.037)       0:02:22.902 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1732122440.771474,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1732122440.771474,
        "dev": 6,
        "device_type": 2048,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 453,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1732122440.771474,
        "nlink": 1,
        "path": "/dev/sda",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.359)       0:02:23.261 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.026)       0:02:23.287 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.021)       0:02:23.308 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.060)       0:02:23.369 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.024)       0:02:23.393 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.019)       0:02:23.413 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Wednesday 20 November 2024  12:07:27 -0500 (0:00:00.018)       0:02:23.431 **** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Wednesday 20 November 2024  12:07:28 -0500 (0:00:00.347)       0:02:23.779 **** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
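
cryptsetup is needed on the managed node so the test can interrogate LUKS headers; rc: 0 with an empty results list and "Nothing to do" is the dnf backend reporting the package was already installed. A minimal equivalent task:

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present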

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Wednesday 20 November 2024  12:07:28 -0500 (0:00:00.675)       0:02:24.455 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Wednesday 20 November 2024  12:07:28 -0500 (0:00:00.021)       0:02:24.477 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Wednesday 20 November 2024  12:07:28 -0500 (0:00:00.042)       0:02:24.519 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Wednesday 20 November 2024  12:07:28 -0500 (0:00:00.018)       0:02:24.538 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Wednesday 20 November 2024  12:07:28 -0500 (0:00:00.022)       0:02:24.560 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Wednesday 20 November 2024  12:07:28 -0500 (0:00:00.019)       0:02:24.580 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:24.598 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.020)       0:02:24.618 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:24.635 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.043)       0:02:24.679 **** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
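
Since the volume is absent, the expected crypttab entry count set in the preceding task is "0", and the assertion passes because _storage_test_crypttab_entries is empty. A sketch of the check, under that assumption:

    - name: Check for /etc/crypttab entry
      ansible.builtin.assert:
        that:
          - _storage_test_crypttab_entries | length ==
            _storage_test_expected_crypttab_entries | int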

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.041)       0:02:24.721 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.036)       0:02:24.757 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.040)       0:02:24.798 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.042)       0:02:24.840 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.019)       0:02:24.860 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.018)       0:02:24.879 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.018)       0:02:24.897 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.018)       0:02:24.916 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:24.933 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.019)       0:02:24.953 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.018)       0:02:24.971 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.018)       0:02:24.989 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:25.006 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:25.024 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:25.041 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.039)       0:02:25.080 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.034)       0:02:25.115 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.035)       0:02:25.150 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.022)       0:02:25.172 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.034)       0:02:25.207 **** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.037)       0:02:25.245 **** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.039)       0:02:25.284 **** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.035)       0:02:25.319 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.034)       0:02:25.354 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:25.372 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:25.389 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:25.407 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.020)       0:02:25.427 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.018)       0:02:25.445 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.017)       0:02:25.463 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.018)       0:02:25.481 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.019)       0:02:25.500 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.019)       0:02:25.519 **** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.020)       0:02:25.540 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.020)       0:02:25.560 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Wednesday 20 November 2024  12:07:29 -0500 (0:00:00.023)       0:02:25.584 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.019)       0:02:25.603 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.018)       0:02:25.622 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.017)       0:02:25.640 **** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.023)       0:02:25.663 **** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.021)       0:02:25.685 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.034)       0:02:25.720 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.018)       0:02:25.738 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.018)       0:02:25.757 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.017)       0:02:25.775 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.019)       0:02:25.794 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.017)       0:02:25.812 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.018)       0:02:25.830 **** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.017)       0:02:25.848 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.018)       0:02:25.867 **** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

PLAY RECAP *********************************************************************
managed-node3              : ok=583  changed=19   unreachable=0    failed=0    skipped=558  rescued=6    ignored=0   

Wednesday 20 November 2024  12:07:30 -0500 (0:00:00.012)       0:02:25.879 **** 
=============================================================================== 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 14.98s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 12.44s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 3.30s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.63s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Get service facts ------------------- 2.17s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.07s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.02s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.85s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Get required packages --------------- 1.64s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.64s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
Read the /etc/fstab file for volume existence --------------------------- 1.38s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 
fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab --- 1.28s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159 
Gathering Facts --------------------------------------------------------- 1.27s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:2 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.99s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.98s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.98s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.98s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.storage : Update facts ------------------------ 0.96s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
Write the key into the key file ----------------------------------------- 0.93s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/tests/storage/tests_luks_pool.yml:109 
fedora.linux_system_roles.storage : Make sure blivet is available ------- 0.91s
/tmp/collections-OFw/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2