ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-6DZ
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.8 (main, Dec  3 2024, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-2)] (/usr/bin/python3.12)
  jinja version = 3.1.5
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_misc.yml *******************************************************
1 plays in /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml

PLAY [Test misc features of the storage role] **********************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:2
Monday 20 January 2025  06:24:04 -0500 (0:00:00.026)       0:00:00.026 ******** 
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-
core/2.17/reference_appendices/interpreter_discovery.html for more information.
ok: [managed-node3]

TASK [Include the role to ensure packages are installed] ***********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:15
Monday 20 January 2025  06:24:06 -0500 (0:00:01.512)       0:00:01.539 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:24:06 -0500 (0:00:00.057)       0:00:01.596 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:24:06 -0500 (0:00:00.052)       0:00:01.649 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:24:06 -0500 (0:00:00.066)       0:00:01.716 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:24:06 -0500 (0:00:00.085)       0:00:01.801 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:24:06 -0500 (0:00:00.493)       0:00:02.295 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__storage_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:24:06 -0500 (0:00:00.028)       0:00:02.323 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:24:07 -0500 (0:00:00.016)       0:00:02.339 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:24:07 -0500 (0:00:00.016)       0:00:02.356 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:24:07 -0500 (0:00:00.046)       0:00:02.403 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:24:08 -0500 (0:00:01.529)       0:00:03.932 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:24:08 -0500 (0:00:00.021)       0:00:03.954 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:24:08 -0500 (0:00:00.022)       0:00:03.976 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:24:09 -0500 (0:00:00.689)       0:00:04.666 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Monday 20 January 2025  06:24:09 -0500 (0:00:00.038)       0:00:04.704 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Monday 20 January 2025  06:24:09 -0500 (0:00:00.034)       0:00:04.739 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "install_copr | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Monday 20 January 2025  06:24:09 -0500 (0:00:00.036)       0:00:04.776 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:24:09 -0500 (0:00:00.035)       0:00:04.811 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:24:10 -0500 (0:00:01.352)       0:00:06.164 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autofs.service": {
                "name": "autofs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "cpupower.service": {
                "name": "cpupower.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "crond.service": {
                "name": "crond.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "dnf-makecache.service": {
                "name": "dnf-makecache.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fcoe.service": {
                "name": "fcoe.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "irqbalance.service": {
                "name": "irqbalance.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "iscsi-shutdown.service": {
                "name": "iscsi-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsi.service": {
                "name": "iscsi.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsid.service": {
                "name": "iscsid.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "kdump.service": {
                "name": "kdump.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "kvm_stat.service": {
                "name": "kvm_stat.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "logrotate.service": {
                "name": "logrotate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-activation-early.service": {
                "name": "lvm2-activation-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "microcode.service": {
                "name": "microcode.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "ndctl-monitor.service": {
                "name": "ndctl-monitor.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "oddjobd.service": {
                "name": "oddjobd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "qemu-guest-agent.service": {
                "name": "qemu-guest-agent.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "quotaon.service": {
                "name": "quotaon.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "raid-check.service": {
                "name": "raid-check.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "rbdmap.service": {
                "name": "rbdmap.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rdisc.service": {
                "name": "rdisc.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rsyslog.service": {
                "name": "rsyslog.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "snapd.seeded.service": {
                "name": "snapd.seeded.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-resume@.service": {
                "name": "systemd-hibernate-resume@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck.service": {
                "name": "systemd-quotacheck.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles.service": {
                "name": "systemd-tmpfiles.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "teamd@.service": {
                "name": "teamd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "ypbind.service": {
                "name": "ypbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "yppasswdd.service": {
                "name": "yppasswdd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ypserv.service": {
                "name": "ypserv.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ypxfrd.service": {
                "name": "ypxfrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            }
        }
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:24:12 -0500 (0:00:01.963)       0:00:08.127 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}
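
Note: the empty list above simply means this host has no systemd-cryptsetup@ units that need masking during the blivet run. Purely as an illustration, and not the role's actual expression in main-blivet.yml, a fact like this could be derived from the previously gathered service facts roughly as follows:

    # Illustrative sketch only -- assumes ansible_facts.services was populated
    # by service_facts earlier in the run; the role's real Jinja may differ.
    - name: Collect systemd-cryptsetup units from the gathered service facts
      ansible.builtin.set_fact:
        storage_cryptsetup_services: "{{ ansible_facts.services | list | select('match', '^systemd-cryptsetup@') | list }}"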

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:24:12 -0500 (0:00:00.089)       0:00:08.216 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:24:12 -0500 (0:00:00.026)       0:00:08.243 ******** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:24:13 -0500 (0:00:00.666)       0:00:08.909 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:24:13 -0500 (0:00:00.074)       0:00:08.984 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372130.8516738,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "85170829302ce3d8b3d8f3031aface16e161cfd6",
        "ctime": 1737372130.247673,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372130.247673,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:24:14 -0500 (0:00:00.441)       0:00:09.425 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:24:14 -0500 (0:00:00.078)       0:00:09.504 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:24:14 -0500 (0:00:00.038)       0:00:09.543 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:24:14 -0500 (0:00:00.049)       0:00:09.592 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:24:14 -0500 (0:00:00.040)       0:00:09.633 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:24:14 -0500 (0:00:00.069)       0:00:09.703 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:24:14 -0500 (0:00:00.111)       0:00:09.814 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:24:14 -0500 (0:00:00.057)       0:00:09.872 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:24:14 -0500 (0:00:00.104)       0:00:09.976 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:24:14 -0500 (0:00:00.071)       0:00:10.047 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:24:14 -0500 (0:00:00.039)       0:00:10.087 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:24:15 -0500 (0:00:00.449)       0:00:10.537 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:24:15 -0500 (0:00:00.038)       0:00:10.575 ******** 
ok: [managed-node3]

TASK [Mark tasks to be skipped] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:19
Monday 20 January 2025  06:24:16 -0500 (0:00:00.918)       0:00:11.494 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "packages_installed",
            "service_facts"
        ]
    },
    "changed": false
}

TASK [Get unused disks for test] ***********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:26
Monday 20 January 2025  06:24:16 -0500 (0:00:00.064)       0:00:11.558 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node3

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
Monday 20 January 2025  06:24:16 -0500 (0:00:00.038)       0:00:11.597 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
Monday 20 January 2025  06:24:17 -0500 (0:00:01.353)       0:00:12.951 ******** 
ok: [managed-node3] => {
    "changed": false,
    "disks": [
        "sda"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}
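
For reference, the "Line:" entries above are a raw block-device scan (name, type, size in bytes, fstype, logical sector size). A roughly equivalent manual scan, shown only for illustration (the test uses its own unused-disk helper rather than this task), would be:

    # Illustrative only -- approximates the scan whose output is quoted above.
    - name: List block devices with type, size, fstype and logical sector size
      ansible.builtin.command: lsblk -p -P -b -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
      register: lsblk_scan        # hypothetical register name
      changed_when: false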

TASK [Debug why there are no unused disks] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20
Monday 20 January 2025  06:24:18 -0500 (0:00:00.764)       0:00:13.716 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'Unable to find unused disk' in unused_disks_return.disks",
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29
Monday 20 January 2025  06:24:18 -0500 (0:00:00.063)       0:00:13.779 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "unused_disks": [
            "sda"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there's not enough unused disks in the system] ********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34
Monday 20 January 2025  06:24:18 -0500 (0:00:00.064)       0:00:13.844 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)",
    "skip_reason": "Conditional result was False"
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39
Monday 20 January 2025  06:24:18 -0500 (0:00:00.118)       0:00:13.963 ******** 
ok: [managed-node3] => {
    "unused_disks": [
        "sda"
    ]
}

TASK [Test creating ext4 filesystem with valid parameter "-Fb 4096"] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:32
Monday 20 January 2025  06:24:18 -0500 (0:00:00.047)       0:00:14.010 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:24:18 -0500 (0:00:00.134)       0:00:14.145 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:24:18 -0500 (0:00:00.080)       0:00:14.225 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:24:19 -0500 (0:00:00.131)       0:00:14.356 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:24:19 -0500 (0:00:00.078)       0:00:14.435 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:24:19 -0500 (0:00:00.039)       0:00:14.474 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:24:19 -0500 (0:00:00.048)       0:00:14.523 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:24:19 -0500 (0:00:00.044)       0:00:14.568 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:24:19 -0500 (0:00:00.033)       0:00:14.601 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:24:19 -0500 (0:00:00.082)       0:00:14.684 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:24:19 -0500 (0:00:00.032)       0:00:14.717 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "fs_create_options": "-Fb 4096",
                    "fs_type": "ext4",
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
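
The storage_pools value shown above is the input driving this run. A minimal sketch of the corresponding role invocation, assuming the include_role pattern these tests typically use and that the disks list comes from the unused_disks fact set earlier (the exact task wording in tests_misc.yml may differ):

    # Sketch only; mirrors the storage_pools structure printed by the task above.
    - name: Test creating ext4 filesystem with valid parameter "-Fb 4096"
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks: "{{ unused_disks }}"
            volumes:
              - name: test1
                size: 4g
                fs_type: ext4
                fs_create_options: "-Fb 4096"
                mount_point: /opt/test1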

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:24:19 -0500 (0:00:00.030)       0:00:14.748 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:24:19 -0500 (0:00:00.034)       0:00:14.782 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:24:19 -0500 (0:00:00.078)       0:00:14.860 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:24:19 -0500 (0:00:00.036)       0:00:14.897 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:24:19 -0500 (0:00:00.029)       0:00:14.927 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:24:19 -0500 (0:00:00.030)       0:00:14.957 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:24:19 -0500 (0:00:00.064)       0:00:15.021 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:24:19 -0500 (0:00:00.021)       0:00:15.043 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "ext4"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "ext4",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "mounted"
        }
    ],
    "packages": [
        "e2fsprogs",
        "xfsprogs",
        "lvm2"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "-Fb 4096",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "ext4",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:24:21 -0500 (0:00:01.602)       0:00:16.646 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:24:21 -0500 (0:00:00.044)       0:00:16.690 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372130.8516738,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "85170829302ce3d8b3d8f3031aface16e161cfd6",
        "ctime": 1737372130.247673,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372130.247673,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:24:21 -0500 (0:00:00.404)       0:00:17.095 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:24:22 -0500 (0:00:00.750)       0:00:17.846 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:24:22 -0500 (0:00:00.042)       0:00:17.889 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "ext4"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "ext4",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "mounted"
            }
        ],
        "packages": [
            "e2fsprogs",
            "xfsprogs",
            "lvm2"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "-Fb 4096",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "ext4",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:24:22 -0500 (0:00:00.057)       0:00:17.946 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "-Fb 4096",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "ext4",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:24:22 -0500 (0:00:00.054)       0:00:18.000 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:24:22 -0500 (0:00:00.051)       0:00:18.052 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:24:22 -0500 (0:00:00.086)       0:00:18.139 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:24:23 -0500 (0:00:00.968)       0:00:19.107 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'ext4', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext4",
    "mount_info": {
        "dump": 0,
        "fstype": "ext4",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
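
The result above corresponds to mounting the new logical volume and persisting it in /etc/fstab. An illustrative stand-alone equivalent of that per-item task, using the module the builtin mount call was redirected to:

    # Illustrative only -- reproduces the single mount item handled above.
    - name: Mount the new volume and persist it in /etc/fstab
      ansible.posix.mount:
        src: /dev/mapper/foo-test1
        path: /opt/test1
        fstype: ext4
        opts: defaults
        state: mounted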

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:24:24 -0500 (0:00:00.520)       0:00:19.628 ******** 
skipping: [managed-node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'ext4', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "ext4",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:24:24 -0500 (0:00:00.070)       0:00:19.699 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:24:25 -0500 (0:00:00.709)       0:00:20.408 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:24:25 -0500 (0:00:00.385)       0:00:20.794 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:24:25 -0500 (0:00:00.022)       0:00:20.816 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:47
Monday 20 January 2025  06:24:26 -0500 (0:00:00.902)       0:00:21.719 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Monday 20 January 2025  06:24:26 -0500 (0:00:00.049)       0:00:21.768 ******** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "-Fb 4096",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "ext4",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Monday 20 January 2025  06:24:26 -0500 (0:00:00.072)       0:00:21.840 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Monday 20 January 2025  06:24:26 -0500 (0:00:00.069)       0:00:21.910 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "14efe614-ae9e-4c6a-8016-1731f97f72a4"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "49ww6R-X8K4-Vq3D-wpCf-IrGC-yLX9-WJo9v1"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "67db83fb-317b-4d3f-873e-b15c4c94e41e"
        }
    }
}
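
The info dictionary above is what the test compares the requested pools and volumes against. As a rough, hand-rolled equivalent (not the info-gathering module the test itself uses), similar per-device fields can be collected from lsblk's JSON output; the playbook below is only an illustrative sketch:

    # Illustrative sketch only: gather roughly the same fields with lsblk.
    - hosts: managed-node3
      gather_facts: false
      tasks:
        - name: Collect block device info with lsblk
          ansible.builtin.command:
            cmd: lsblk -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID --json
          register: lsblk_out
          changed_when: false

        - name: Show the parsed device list
          ansible.builtin.debug:
            msg: "{{ (lsblk_out.stdout | from_json).blockdevices }}"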

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Monday 20 January 2025  06:24:27 -0500 (0:00:00.508)       0:00:22.418 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002854",
    "end": "2025-01-20 06:24:27.587163",
    "rc": 0,
    "start": "2025-01-20 06:24:27.584309"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:17 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=67db83fb-317b-4d3f-873e-b15c4c94e41e /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
/dev/mapper/foo-test1 /opt/test1 ext4 defaults 0 0
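
The last line is the entry added by the storage role for the ext4 LV. A minimal, hypothetical check of that entry (the actual assertions live in the test's own fstab-verification tasks further below) could look like:

    # Hedged sketch: assert that the role-managed fstab entry is present.
    - hosts: managed-node3
      gather_facts: false
      tasks:
        - name: Read /etc/fstab
          ansible.builtin.slurp:
            src: /etc/fstab
          register: fstab_raw

        - name: Assert the LV is mounted at /opt/test1 with ext4
          ansible.builtin.assert:
            that:
              - "'/dev/mapper/foo-test1 /opt/test1 ext4 defaults 0 0' in (fstab_raw.content | b64decode)"
            fail_msg: expected fstab entry for /dev/mapper/foo-test1 not found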

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Monday 20 January 2025  06:24:27 -0500 (0:00:00.552)       0:00:22.970 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002711",
    "end": "2025-01-20 06:24:27.953877",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-20 06:24:27.951166"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Monday 20 January 2025  06:24:28 -0500 (0:00:00.368)       0:00:23.338 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '-Fb 4096', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Monday 20 January 2025  06:24:28 -0500 (0:00:00.061)       0:00:23.400 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Monday 20 January 2025  06:24:28 -0500 (0:00:00.021)       0:00:23.422 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.029289",
    "end": "2025-01-20 06:24:28.419120",
    "rc": 0,
    "start": "2025-01-20 06:24:28.389831"
}

STDOUT:

        0
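
With "vgs --noheadings --binary -o shared", LVM prints 1 for a shared (lvmlockd-managed) VG and 0 otherwise, so the "0" above matches the pool's "shared: false" setting. A minimal sketch of the same check, assuming the VG is named "foo":

    # Sketch: query the VG shared flag and assert it is off.
    - hosts: managed-node3
      gather_facts: false
      tasks:
        - name: Get VG shared flag
          ansible.builtin.command:
            cmd: vgs --noheadings --binary -o shared foo
          register: vg_shared
          changed_when: false

        - name: Verify the VG is not shared
          ansible.builtin.assert:
            that:
              - vg_shared.stdout | trim == "0"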

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Monday 20 January 2025  06:24:28 -0500 (0:00:00.381)       0:00:23.803 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Monday 20 January 2025  06:24:28 -0500 (0:00:00.028)       0:00:23.832 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Monday 20 January 2025  06:24:28 -0500 (0:00:00.051)       0:00:23.883 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Monday 20 January 2025  06:24:28 -0500 (0:00:00.077)       0:00:23.961 ******** 
ok: [managed-node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}
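
Resolving the canonical path lets the test compare a PV such as /dev/sda with whatever symlinked name it may have been specified under. A hypothetical stand-alone version of that lookup (device name taken from the output above):

    # Illustrative sketch: resolve each member PV to its canonical device path.
    - hosts: managed-node3
      gather_facts: false
      tasks:
        - name: Resolve the canonical device path for each member device
          ansible.builtin.command:
            cmd: realpath {{ pv }}
          loop:
            - /dev/sda
          loop_control:
            loop_var: pv
          register: pv_paths
          changed_when: false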

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Monday 20 January 2025  06:24:29 -0500 (0:00:00.685)       0:00:24.646 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Monday 20 January 2025  06:24:29 -0500 (0:00:00.056)       0:00:24.702 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Monday 20 January 2025  06:24:29 -0500 (0:00:00.074)       0:00:24.777 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Monday 20 January 2025  06:24:29 -0500 (0:00:00.089)       0:00:24.866 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Monday 20 January 2025  06:24:29 -0500 (0:00:00.041)       0:00:24.908 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Monday 20 January 2025  06:24:29 -0500 (0:00:00.074)       0:00:24.982 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Monday 20 January 2025  06:24:29 -0500 (0:00:00.032)       0:00:25.015 ******** 
ok: [managed-node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Monday 20 January 2025  06:24:29 -0500 (0:00:00.049)       0:00:25.064 ******** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDOUT:


** (process:312510): WARNING **: 06:24:30.039: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.9.60 originally 10.31.9.60
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.9.60 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.9.60 originally 10.31.9.60
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.9.60 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Monday 20 January 2025  06:24:30 -0500 (0:00:00.444)       0:00:25.508 ******** 
skipping: [managed-node3] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "storage_test_pool.grow_to_fill | bool",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Monday 20 January 2025  06:24:30 -0500 (0:00:00.066)       0:00:25.575 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Monday 20 January 2025  06:24:30 -0500 (0:00:00.081)       0:00:25.656 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Monday 20 January 2025  06:24:30 -0500 (0:00:00.033)       0:00:25.690 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Monday 20 January 2025  06:24:30 -0500 (0:00:00.033)       0:00:25.723 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Monday 20 January 2025  06:24:30 -0500 (0:00:00.021)       0:00:25.744 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Monday 20 January 2025  06:24:30 -0500 (0:00:00.028)       0:00:25.773 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Monday 20 January 2025  06:24:30 -0500 (0:00:00.038)       0:00:25.811 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Monday 20 January 2025  06:24:30 -0500 (0:00:00.031)       0:00:25.843 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Monday 20 January 2025  06:24:30 -0500 (0:00:00.021)       0:00:25.865 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Monday 20 January 2025  06:24:30 -0500 (0:00:00.033)       0:00:25.898 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Monday 20 January 2025  06:24:30 -0500 (0:00:00.027)       0:00:25.925 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Monday 20 January 2025  06:24:30 -0500 (0:00:00.020)       0:00:25.946 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Monday 20 January 2025  06:24:30 -0500 (0:00:00.051)       0:00:25.998 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Monday 20 January 2025  06:24:30 -0500 (0:00:00.041)       0:00:26.039 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '-Fb 4096', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Monday 20 January 2025  06:24:30 -0500 (0:00:00.042)       0:00:26.081 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Monday 20 January 2025  06:24:30 -0500 (0:00:00.027)       0:00:26.109 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Monday 20 January 2025  06:24:30 -0500 (0:00:00.050)       0:00:26.159 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Monday 20 January 2025  06:24:30 -0500 (0:00:00.070)       0:00:26.229 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Monday 20 January 2025  06:24:30 -0500 (0:00:00.063)       0:00:26.293 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Monday 20 January 2025  06:24:31 -0500 (0:00:00.053)       0:00:26.347 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Monday 20 January 2025  06:24:31 -0500 (0:00:00.074)       0:00:26.422 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Monday 20 January 2025  06:24:31 -0500 (0:00:00.056)       0:00:26.478 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Monday 20 January 2025  06:24:31 -0500 (0:00:00.095)       0:00:26.573 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '-Fb 4096', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Monday 20 January 2025  06:24:31 -0500 (0:00:00.070)       0:00:26.644 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Monday 20 January 2025  06:24:31 -0500 (0:00:00.035)       0:00:26.679 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Monday 20 January 2025  06:24:31 -0500 (0:00:00.040)       0:00:26.720 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Monday 20 January 2025  06:24:31 -0500 (0:00:00.033)       0:00:26.753 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Monday 20 January 2025  06:24:31 -0500 (0:00:00.036)       0:00:26.790 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Monday 20 January 2025  06:24:31 -0500 (0:00:00.077)       0:00:26.868 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Monday 20 January 2025  06:24:31 -0500 (0:00:00.104)       0:00:26.972 ******** 
skipping: [managed-node3] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Monday 20 January 2025  06:24:31 -0500 (0:00:00.079)       0:00:27.052 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node3 => (item=/dev/sda)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Monday 20 January 2025  06:24:31 -0500 (0:00:00.071)       0:00:27.123 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Monday 20 January 2025  06:24:31 -0500 (0:00:00.086)       0:00:27.210 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Monday 20 January 2025  06:24:31 -0500 (0:00:00.098)       0:00:27.308 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Monday 20 January 2025  06:24:32 -0500 (0:00:00.065)       0:00:27.374 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Monday 20 January 2025  06:24:32 -0500 (0:00:00.063)       0:00:27.437 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Monday 20 January 2025  06:24:32 -0500 (0:00:00.067)       0:00:27.505 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Monday 20 January 2025  06:24:32 -0500 (0:00:00.033)       0:00:27.539 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Monday 20 January 2025  06:24:32 -0500 (0:00:00.040)       0:00:27.580 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Monday 20 January 2025  06:24:32 -0500 (0:00:00.099)       0:00:27.679 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '-Fb 4096', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Monday 20 January 2025  06:24:32 -0500 (0:00:00.068)       0:00:27.747 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Monday 20 January 2025  06:24:32 -0500 (0:00:00.036)       0:00:27.784 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Monday 20 January 2025  06:24:32 -0500 (0:00:00.035)       0:00:27.820 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Monday 20 January 2025  06:24:32 -0500 (0:00:00.034)       0:00:27.855 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Monday 20 January 2025  06:24:32 -0500 (0:00:00.031)       0:00:27.886 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Monday 20 January 2025  06:24:32 -0500 (0:00:00.040)       0:00:27.926 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Monday 20 January 2025  06:24:32 -0500 (0:00:00.060)       0:00:27.987 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Monday 20 January 2025  06:24:32 -0500 (0:00:00.040)       0:00:28.027 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Monday 20 January 2025  06:24:32 -0500 (0:00:00.107)       0:00:28.135 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Monday 20 January 2025  06:24:32 -0500 (0:00:00.060)       0:00:28.195 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Monday 20 January 2025  06:24:32 -0500 (0:00:00.037)       0:00:28.233 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Monday 20 January 2025  06:24:32 -0500 (0:00:00.048)       0:00:28.282 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Monday 20 January 2025  06:24:33 -0500 (0:00:00.065)       0:00:28.347 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Monday 20 January 2025  06:24:33 -0500 (0:00:00.063)       0:00:28.411 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Monday 20 January 2025  06:24:33 -0500 (0:00:00.045)       0:00:28.457 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Monday 20 January 2025  06:24:33 -0500 (0:00:00.061)       0:00:28.518 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '-Fb 4096', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Monday 20 January 2025  06:24:33 -0500 (0:00:00.080)       0:00:28.599 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Monday 20 January 2025  06:24:33 -0500 (0:00:00.087)       0:00:28.686 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Monday 20 January 2025  06:24:33 -0500 (0:00:00.295)       0:00:28.982 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Monday 20 January 2025  06:24:33 -0500 (0:00:00.065)       0:00:29.048 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Monday 20 January 2025  06:24:33 -0500 (0:00:00.074)       0:00:29.122 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Monday 20 January 2025  06:24:33 -0500 (0:00:00.055)       0:00:29.178 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Monday 20 January 2025  06:24:33 -0500 (0:00:00.047)       0:00:29.226 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Monday 20 January 2025  06:24:33 -0500 (0:00:00.033)       0:00:29.259 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Monday 20 January 2025  06:24:33 -0500 (0:00:00.056)       0:00:29.316 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Monday 20 January 2025  06:24:34 -0500 (0:00:00.033)       0:00:29.349 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Monday 20 January 2025  06:24:34 -0500 (0:00:00.043)       0:00:29.393 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Monday 20 January 2025  06:24:34 -0500 (0:00:00.054)       0:00:29.448 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Monday 20 January 2025  06:24:34 -0500 (0:00:00.034)       0:00:29.483 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Monday 20 January 2025  06:24:34 -0500 (0:00:00.046)       0:00:29.529 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "/dev/mapper/foo-test1 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 ext4 defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
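
These lists come from matching the volume's device, mount point, and mount options against the fstab contents read earlier; each "expected ... matches" value of "1" means exactly one matching line is required. A hedged, self-contained sketch of the same idea using regex_findall (the exact expressions in the test file may differ):

    # Sketch: build match lists from /etc/fstab and expect exactly one entry.
    - hosts: managed-node3
      gather_facts: false
      tasks:
        - name: Read /etc/fstab
          ansible.builtin.slurp:
            src: /etc/fstab
          register: fstab_raw

        - name: Build match lists for the test volume
          ansible.builtin.set_fact:
            fstab_id_matches: "{{ (fstab_raw.content | b64decode) | regex_findall('/dev/mapper/foo-test1 ') }}"
            fstab_mount_point_matches: "{{ (fstab_raw.content | b64decode) | regex_findall(' /opt/test1 ') }}"

        - name: Expect exactly one fstab entry for the volume
          ansible.builtin.assert:
            that:
              - fstab_id_matches | length == 1
              - fstab_mount_point_matches | length == 1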

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Monday 20 January 2025  06:24:34 -0500 (0:00:00.161)       0:00:29.691 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Monday 20 January 2025  06:24:34 -0500 (0:00:00.099)       0:00:29.791 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Monday 20 January 2025  06:24:34 -0500 (0:00:00.088)       0:00:29.880 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Monday 20 January 2025  06:24:34 -0500 (0:00:00.081)       0:00:29.961 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Monday 20 January 2025  06:24:34 -0500 (0:00:00.073)       0:00:30.034 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Monday 20 January 2025  06:24:34 -0500 (0:00:00.038)       0:00:30.073 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Monday 20 January 2025  06:24:34 -0500 (0:00:00.078)       0:00:30.151 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Monday 20 January 2025  06:24:34 -0500 (0:00:00.145)       0:00:30.297 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372261.2088213,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737372261.2088213,
        "dev": 5,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 9647,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1737372261.2088213,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
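
The stat result above shows that /dev/mapper/foo-test1 exists as a block device node owned by root:disk. A minimal, hypothetical version of the presence check:

    # Sketch: stat the mapper path and assert it is an existing block device.
    - hosts: managed-node3
      gather_facts: false
      tasks:
        - name: Stat the volume device node
          ansible.builtin.stat:
            path: /dev/mapper/foo-test1
            follow: true
          register: dev_node

        - name: Verify the device node is present
          ansible.builtin.assert:
            that:
              - dev_node.stat.exists
              - dev_node.stat.isblk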

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Monday 20 January 2025  06:24:35 -0500 (0:00:00.498)       0:00:30.796 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Monday 20 January 2025  06:24:35 -0500 (0:00:00.088)       0:00:30.884 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Monday 20 January 2025  06:24:35 -0500 (0:00:00.055)       0:00:30.939 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Monday 20 January 2025  06:24:35 -0500 (0:00:00.049)       0:00:30.988 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Monday 20 January 2025  06:24:35 -0500 (0:00:00.065)       0:00:31.054 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Monday 20 January 2025  06:24:35 -0500 (0:00:00.059)       0:00:31.113 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Monday 20 January 2025  06:24:35 -0500 (0:00:00.075)       0:00:31.189 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Monday 20 January 2025  06:24:35 -0500 (0:00:00.051)       0:00:31.240 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Monday 20 January 2025  06:24:37 -0500 (0:00:01.501)       0:00:32.742 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Monday 20 January 2025  06:24:37 -0500 (0:00:00.069)       0:00:32.811 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Monday 20 January 2025  06:24:37 -0500 (0:00:00.068)       0:00:32.880 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Monday 20 January 2025  06:24:37 -0500 (0:00:00.127)       0:00:33.008 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Monday 20 January 2025  06:24:37 -0500 (0:00:00.067)       0:00:33.075 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Monday 20 January 2025  06:24:37 -0500 (0:00:00.057)       0:00:33.134 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Monday 20 January 2025  06:24:37 -0500 (0:00:00.052)       0:00:33.186 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Monday 20 January 2025  06:24:37 -0500 (0:00:00.060)       0:00:33.247 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Monday 20 January 2025  06:24:37 -0500 (0:00:00.037)       0:00:33.286 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Monday 20 January 2025  06:24:38 -0500 (0:00:00.137)       0:00:33.423 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
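
The crypttab check counts /etc/crypttab lines that reference this volume and asserts the count matches the expectation set in "Set test variables" above (zero here, since the volume is not encrypted). Roughly, using the variable names visible in that task:

- name: Check for /etc/crypttab entry (sketch)
  ansible.builtin.assert:
    that:
      - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int
    msg: "unexpected number of /etc/crypttab entries for this volume"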

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Monday 20 January 2025  06:24:38 -0500 (0:00:00.133)       0:00:33.557 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Monday 20 January 2025  06:24:38 -0500 (0:00:00.092)       0:00:33.650 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Monday 20 January 2025  06:24:38 -0500 (0:00:00.083)       0:00:33.734 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Monday 20 January 2025  06:24:38 -0500 (0:00:00.085)       0:00:33.819 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Monday 20 January 2025  06:24:38 -0500 (0:00:00.040)       0:00:33.860 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Monday 20 January 2025  06:24:38 -0500 (0:00:00.034)       0:00:33.894 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Monday 20 January 2025  06:24:38 -0500 (0:00:00.035)       0:00:33.930 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Monday 20 January 2025  06:24:38 -0500 (0:00:00.040)       0:00:33.970 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Monday 20 January 2025  06:24:38 -0500 (0:00:00.060)       0:00:34.030 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Monday 20 January 2025  06:24:38 -0500 (0:00:00.058)       0:00:34.089 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Monday 20 January 2025  06:24:38 -0500 (0:00:00.078)       0:00:34.168 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Monday 20 January 2025  06:24:38 -0500 (0:00:00.058)       0:00:34.226 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Monday 20 January 2025  06:24:38 -0500 (0:00:00.043)       0:00:34.270 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Monday 20 January 2025  06:24:39 -0500 (0:00:00.067)       0:00:34.337 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Monday 20 January 2025  06:24:39 -0500 (0:00:00.069)       0:00:34.406 ******** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Monday 20 January 2025  06:24:39 -0500 (0:00:00.646)       0:00:35.053 ******** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
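
Both parses resolve the 4 GiB volume to 4294967296 bytes (4 x 1024^3); the lvm/parted/size renderings appear to come from a conversion helper in the test suite. For the plain byte value, the stock human_to_bytes filter gives the same number:

- name: Convert a human-readable size to bytes (illustrative; not the test's helper)
  ansible.builtin.set_fact:
    expected_bytes: "{{ '4G' | human_to_bytes }}"  # 4 * 1024^3 = 4294967296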

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Monday 20 January 2025  06:24:40 -0500 (0:00:00.425)       0:00:35.478 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Monday 20 January 2025  06:24:40 -0500 (0:00:00.075)       0:00:35.554 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Monday 20 January 2025  06:24:40 -0500 (0:00:00.039)       0:00:35.593 ******** 
ok: [managed-node3] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Monday 20 January 2025  06:24:40 -0500 (0:00:00.477)       0:00:36.071 ******** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Monday 20 January 2025  06:24:40 -0500 (0:00:00.134)       0:00:36.205 ******** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Monday 20 January 2025  06:24:41 -0500 (0:00:00.128)       0:00:36.334 ******** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Monday 20 January 2025  06:24:41 -0500 (0:00:00.121)       0:00:36.456 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Monday 20 January 2025  06:24:41 -0500 (0:00:00.127)       0:00:36.584 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Monday 20 January 2025  06:24:41 -0500 (0:00:00.055)       0:00:36.639 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Monday 20 January 2025  06:24:41 -0500 (0:00:00.134)       0:00:36.773 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Monday 20 January 2025  06:24:41 -0500 (0:00:00.046)       0:00:36.820 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Monday 20 January 2025  06:24:41 -0500 (0:00:00.032)       0:00:36.853 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Monday 20 January 2025  06:24:41 -0500 (0:00:00.033)       0:00:36.887 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Monday 20 January 2025  06:24:41 -0500 (0:00:00.052)       0:00:36.939 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Monday 20 January 2025  06:24:41 -0500 (0:00:00.051)       0:00:36.990 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Monday 20 January 2025  06:24:41 -0500 (0:00:00.062)       0:00:37.053 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Monday 20 January 2025  06:24:41 -0500 (0:00:00.048)       0:00:37.101 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Monday 20 January 2025  06:24:41 -0500 (0:00:00.065)       0:00:37.167 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Monday 20 January 2025  06:24:41 -0500 (0:00:00.048)       0:00:37.215 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Monday 20 January 2025  06:24:41 -0500 (0:00:00.052)       0:00:37.267 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Monday 20 January 2025  06:24:41 -0500 (0:00:00.050)       0:00:37.318 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Monday 20 January 2025  06:24:42 -0500 (0:00:00.037)       0:00:37.355 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Monday 20 January 2025  06:24:42 -0500 (0:00:00.060)       0:00:37.415 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Monday 20 January 2025  06:24:42 -0500 (0:00:00.068)       0:00:37.483 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Monday 20 January 2025  06:24:42 -0500 (0:00:00.041)       0:00:37.525 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
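
The final assertion compares the parsed actual size with the expected value established above; rather than requiring exact byte equality, the comparison tolerates a small relative difference (the threshold below is an assumption, not the test's exact figure):

- name: Assert expected size is actual size (sketch)
  ansible.builtin.assert:
    that:
      - "(storage_test_expected_size | int - storage_test_actual_size.bytes) | abs / (storage_test_expected_size | int) < 0.04"
    msg: "actual size differs from expected size by more than the allowed tolerance"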

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Monday 20 January 2025  06:24:42 -0500 (0:00:00.086)       0:00:37.611 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.028534",
    "end": "2025-01-20 06:24:42.656095",
    "rc": 0,
    "start": "2025-01-20 06:24:42.627561"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Monday 20 January 2025  06:24:42 -0500 (0:00:00.446)       0:00:38.058 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}
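
The segment-type value is obtained by running lvs with --nameprefixes and then extracting LVM2_SEGTYPE from the key=value output. A sketch of that command-plus-parse pattern (register name and regex are illustrative):

- name: Get information about the LV (sketch)
  ansible.builtin.command:
    cmd: lvs --noheadings --nameprefixes --units=b --nosuffix --unquoted -o name,attr,cache_total_blocks,chunk_size,segtype foo/test1
  register: lvs_out
  changed_when: false

- name: Set LV segment type (sketch)
  ansible.builtin.set_fact:
    storage_test_lv_segtype: "{{ lvs_out.stdout | regex_findall('LVM2_SEGTYPE=(\\S+)') }}"  # -> ['linear']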

TASK [Check segment type] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Monday 20 January 2025  06:24:42 -0500 (0:00:00.109)       0:00:38.167 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Monday 20 January 2025  06:24:42 -0500 (0:00:00.073)       0:00:38.240 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Monday 20 January 2025  06:24:42 -0500 (0:00:00.065)       0:00:38.305 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Monday 20 January 2025  06:24:43 -0500 (0:00:00.082)       0:00:38.388 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Monday 20 January 2025  06:24:43 -0500 (0:00:00.092)       0:00:38.481 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Monday 20 January 2025  06:24:43 -0500 (0:00:00.095)       0:00:38.576 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Monday 20 January 2025  06:24:43 -0500 (0:00:00.045)       0:00:38.622 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Monday 20 January 2025  06:24:43 -0500 (0:00:00.031)       0:00:38.653 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Remove the volume group created above] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:50
Monday 20 January 2025  06:24:43 -0500 (0:00:00.036)       0:00:38.689 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:24:43 -0500 (0:00:00.067)       0:00:38.757 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:24:43 -0500 (0:00:00.038)       0:00:38.796 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:24:43 -0500 (0:00:00.046)       0:00:38.842 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:24:43 -0500 (0:00:00.053)       0:00:38.896 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:24:43 -0500 (0:00:00.029)       0:00:38.925 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:24:43 -0500 (0:00:00.037)       0:00:38.962 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:24:43 -0500 (0:00:00.076)       0:00:39.039 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:24:43 -0500 (0:00:00.078)       0:00:39.118 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:24:43 -0500 (0:00:00.141)       0:00:39.259 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:24:43 -0500 (0:00:00.065)       0:00:39.325 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "state": "absent",
            "type": "lvm"
        }
    ]
}
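
The storage_pools shown here is what the test feeds back into the role to tear the pool down: the same pool definition, but with state: absent and no volumes. A sketch of such an invocation:

- name: Remove the volume group created above (sketch)
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_pools:
      - name: foo
        disks:
          - sda
        type: lvm
        state: absent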

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:24:44 -0500 (0:00:00.041)       0:00:39.366 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:24:44 -0500 (0:00:00.057)       0:00:39.424 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:24:44 -0500 (0:00:00.068)       0:00:39.493 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:24:44 -0500 (0:00:00.050)       0:00:39.543 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:24:44 -0500 (0:00:00.040)       0:00:39.583 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:24:44 -0500 (0:00:00.061)       0:00:39.645 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:24:44 -0500 (0:00:00.154)       0:00:39.799 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:24:44 -0500 (0:00:00.035)       0:00:39.834 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "ext4"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "fstype": "ext4",
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "absent"
        }
    ],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": []
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:24:46 -0500 (0:00:01.727)       0:00:41.562 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:24:46 -0500 (0:00:00.109)       0:00:41.671 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372264.2118247,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "c32bf202c7b2a23432ca8d6114913e6720bfb078",
        "ctime": 1737372264.2078247,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372264.2078247,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1417,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:24:46 -0500 (0:00:00.521)       0:00:42.193 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}
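
This is the task that maintains the "# system_role:storage" fingerprint visible at the top of the fstab dump later in this run; it reports ok/unchanged because the marker is already in place. A lineinfile call along these lines would have the same effect (the role's actual task may differ):

- name: Add fingerprint to /etc/fstab if present (sketch)
  ansible.builtin.lineinfile:
    path: /etc/fstab
    line: "# system_role:storage"
    insertbefore: BOF
  when: __storage_fstab_stat.stat.exists  # register name from the preceding stat task is assumed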

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:24:47 -0500 (0:00:00.442)       0:00:42.636 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:24:47 -0500 (0:00:00.034)       0:00:42.670 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "ext4"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "fstype": "ext4",
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "absent"
            }
        ],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:24:47 -0500 (0:00:00.044)       0:00:42.715 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:24:47 -0500 (0:00:00.041)       0:00:42.757 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:24:47 -0500 (0:00:00.041)       0:00:42.798 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'ext4'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext4",
    "mount_info": {
        "fstype": "ext4",
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
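
Removing the obsolete mount is a loop of ansible.posix.mount over the mounts list returned by the provider; state: absent both unmounts /opt/test1 and deletes its /etc/fstab line. Approximately:

- name: Remove obsolete mounts (sketch)
  ansible.posix.mount:
    src: "{{ mount_info['src'] }}"
    path: "{{ mount_info['path'] }}"
    fstype: "{{ mount_info['fstype'] }}"
    state: "{{ mount_info['state'] }}"
  loop: "{{ blivet_output.mounts }}"  # simplified: the role handles absent and new/current entries in separate tasks
  loop_control:
    loop_var: mount_info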

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:24:47 -0500 (0:00:00.437)       0:00:43.236 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
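
The follow-up systemd task is just a daemon reload, so the .mount unit generated from the removed fstab entry goes away as well; "name": null with an empty status is what the module returns when only daemon_reload is requested. Minimal equivalent:

- name: Tell systemd to refresh its view of /etc/fstab (sketch)
  ansible.builtin.systemd:
    daemon_reload: true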

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:24:48 -0500 (0:00:00.683)       0:00:43.920 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:24:48 -0500 (0:00:00.055)       0:00:43.975 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:24:48 -0500 (0:00:00.057)       0:00:44.033 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:24:49 -0500 (0:00:00.749)       0:00:44.782 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:24:49 -0500 (0:00:00.462)       0:00:45.245 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:24:49 -0500 (0:00:00.032)       0:00:45.277 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:60
Monday 20 January 2025  06:24:51 -0500 (0:00:01.065)       0:00:46.343 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Monday 20 January 2025  06:24:51 -0500 (0:00:00.066)       0:00:46.409 ******** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": []
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Monday 20 January 2025  06:24:51 -0500 (0:00:00.054)       0:00:46.463 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Monday 20 January 2025  06:24:51 -0500 (0:00:00.041)       0:00:46.505 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "67db83fb-317b-4d3f-873e-b15c4c94e41e"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Monday 20 January 2025  06:24:51 -0500 (0:00:00.369)       0:00:46.875 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.003836",
    "end": "2025-01-20 06:24:52.906921",
    "rc": 0,
    "start": "2025-01-20 06:24:51.903085"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:17 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=67db83fb-317b-4d3f-873e-b15c4c94e41e /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Monday 20 January 2025  06:24:52 -0500 (0:00:01.447)       0:00:48.323 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002807",
    "end": "2025-01-20 06:24:53.400026",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-20 06:24:53.397219"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Monday 20 January 2025  06:24:53 -0500 (0:00:00.476)       0:00:48.799 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'lvm', 'volumes': []})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Monday 20 January 2025  06:24:53 -0500 (0:00:00.098)       0:00:48.897 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Monday 20 January 2025  06:24:53 -0500 (0:00:00.025)       0:00:48.923 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Monday 20 January 2025  06:24:53 -0500 (0:00:00.023)       0:00:48.946 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Monday 20 January 2025  06:24:53 -0500 (0:00:00.034)       0:00:48.981 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Monday 20 January 2025  06:24:53 -0500 (0:00:00.079)       0:00:49.060 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "0",
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Monday 20 January 2025  06:24:53 -0500 (0:00:00.063)       0:00:49.124 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Monday 20 January 2025  06:24:53 -0500 (0:00:00.019)       0:00:49.144 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "0"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Monday 20 January 2025  06:24:53 -0500 (0:00:00.056)       0:00:49.200 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": []
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Monday 20 January 2025  06:24:53 -0500 (0:00:00.048)       0:00:49.249 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Monday 20 January 2025  06:24:53 -0500 (0:00:00.046)       0:00:49.296 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Monday 20 January 2025  06:24:53 -0500 (0:00:00.028)       0:00:49.325 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Monday 20 January 2025  06:24:54 -0500 (0:00:00.050)       0:00:49.376 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Monday 20 January 2025  06:24:54 -0500 (0:00:00.021)       0:00:49.397 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Monday 20 January 2025  06:24:54 -0500 (0:00:00.031)       0:00:49.429 ******** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDOUT:


** (process:314976): WARNING **: 06:24:54.351: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.9.60 originally 10.31.9.60
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.9.60 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.9.60 originally 10.31.9.60
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.9.60 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Monday 20 January 2025  06:24:54 -0500 (0:00:00.395)       0:00:49.824 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Monday 20 January 2025  06:24:54 -0500 (0:00:00.062)       0:00:49.886 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Monday 20 January 2025  06:24:54 -0500 (0:00:00.074)       0:00:49.961 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Monday 20 January 2025  06:24:54 -0500 (0:00:00.023)       0:00:49.984 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Monday 20 January 2025  06:24:54 -0500 (0:00:00.021)       0:00:50.006 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Monday 20 January 2025  06:24:54 -0500 (0:00:00.027)       0:00:50.034 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Monday 20 January 2025  06:24:54 -0500 (0:00:00.022)       0:00:50.057 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Monday 20 January 2025  06:24:54 -0500 (0:00:00.028)       0:00:50.085 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Monday 20 January 2025  06:24:54 -0500 (0:00:00.025)       0:00:50.110 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Monday 20 January 2025  06:24:54 -0500 (0:00:00.020)       0:00:50.131 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Monday 20 January 2025  06:24:54 -0500 (0:00:00.021)       0:00:50.152 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Monday 20 January 2025  06:24:54 -0500 (0:00:00.021)       0:00:50.173 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Monday 20 January 2025  06:24:54 -0500 (0:00:00.020)       0:00:50.193 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Monday 20 January 2025  06:24:54 -0500 (0:00:00.025)       0:00:50.219 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Monday 20 January 2025  06:24:54 -0500 (0:00:00.042)       0:00:50.261 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Monday 20 January 2025  06:24:54 -0500 (0:00:00.018)       0:00:50.280 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Monday 20 January 2025  06:24:55 -0500 (0:00:00.051)       0:00:50.332 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Monday 20 January 2025  06:24:55 -0500 (0:00:00.032)       0:00:50.364 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Monday 20 January 2025  06:24:55 -0500 (0:00:00.077)       0:00:50.442 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Monday 20 January 2025  06:24:55 -0500 (0:00:00.069)       0:00:50.511 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Monday 20 January 2025  06:24:55 -0500 (0:00:00.034)       0:00:50.546 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Monday 20 January 2025  06:24:55 -0500 (0:00:00.033)       0:00:50.579 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Monday 20 January 2025  06:24:55 -0500 (0:00:00.040)       0:00:50.620 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Monday 20 January 2025  06:24:55 -0500 (0:00:00.058)       0:00:50.679 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Monday 20 January 2025  06:24:55 -0500 (0:00:00.024)       0:00:50.703 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Monday 20 January 2025  06:24:55 -0500 (0:00:00.064)       0:00:50.768 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Monday 20 January 2025  06:24:55 -0500 (0:00:00.021)       0:00:50.790 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Monday 20 January 2025  06:24:55 -0500 (0:00:00.021)       0:00:50.811 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Monday 20 January 2025  06:24:55 -0500 (0:00:00.021)       0:00:50.832 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Monday 20 January 2025  06:24:55 -0500 (0:00:00.022)       0:00:50.854 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Monday 20 January 2025  06:24:55 -0500 (0:00:00.057)       0:00:50.912 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Monday 20 January 2025  06:24:55 -0500 (0:00:00.028)       0:00:50.940 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Monday 20 January 2025  06:24:55 -0500 (0:00:00.027)       0:00:50.968 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Monday 20 January 2025  06:24:55 -0500 (0:00:00.033)       0:00:51.001 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Monday 20 January 2025  06:24:55 -0500 (0:00:00.031)       0:00:51.033 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Test for correct handling of invalid parameter when creating ext4 filesystem] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:63
Monday 20 January 2025  06:24:55 -0500 (0:00:00.038)       0:00:51.072 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml for managed-node3

TASK [Store global variable value copy] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:4
Monday 20 January 2025  06:24:55 -0500 (0:00:00.076)       0:00:51.148 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_pools_global": [],
        "storage_safe_mode_global": false,
        "storage_volumes_global": []
    },
    "changed": false
}

TASK [Verify role raises correct error] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:10
Monday 20 January 2025  06:24:55 -0500 (0:00:00.077)       0:00:51.225 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:24:55 -0500 (0:00:00.061)       0:00:51.287 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:24:56 -0500 (0:00:00.054)       0:00:51.342 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:24:56 -0500 (0:00:00.071)       0:00:51.413 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:24:56 -0500 (0:00:00.090)       0:00:51.504 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:24:56 -0500 (0:00:00.043)       0:00:51.547 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:24:56 -0500 (0:00:00.046)       0:00:51.594 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:24:56 -0500 (0:00:00.034)       0:00:51.628 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:24:56 -0500 (0:00:00.038)       0:00:51.666 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:24:56 -0500 (0:00:00.069)       0:00:51.736 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:24:56 -0500 (0:00:00.033)       0:00:51.769 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "fs_create_options": "-Fb 512",
                    "fs_type": "ext4",
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
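
NOTE: the storage_pools value shown above is the input this expected-failure test feeds to the role; the ext4 create options "-Fb 512" are intentionally invalid (mke2fs does not accept a 512-byte block size), which is what triggers the FSError reported further below. A minimal reconstruction as playbook YAML follows; only the pool and volume values come from this log, while the play name, hosts line, and role invocation style are assumptions added for illustration.

# Sketch reconstructed from the logged storage_pools value above (not the test source).
- name: Attempt to create an ext4 volume with invalid mkfs options
  hosts: managed-node3
  tasks:
    - name: Run the storage role with an invalid fs_create_options value
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_safe_mode: false
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            volumes:
              - name: test1
                size: 4g
                fs_type: ext4
                mount_point: /opt/test1
                # mke2fs rejects a 512-byte block size, so the format step fails
                fs_create_options: "-Fb 512"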

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:24:56 -0500 (0:00:00.028)       0:00:51.798 ******** 
ok: [managed-node3] => {
    "storage_volumes": []
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:24:56 -0500 (0:00:00.028)       0:00:51.827 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:24:56 -0500 (0:00:00.026)       0:00:51.853 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:24:56 -0500 (0:00:00.025)       0:00:51.879 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:24:56 -0500 (0:00:00.026)       0:00:51.906 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:24:56 -0500 (0:00:00.029)       0:00:51.935 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:24:56 -0500 (0:00:00.076)       0:00:52.011 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:24:56 -0500 (0:00:00.033)       0:00:52.045 ******** 
fatal: [managed-node3]: FAILED! => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [
        "e2fsprogs",
        "xfsprogs",
        "lvm2"
    ],
    "pools": [],
    "volumes": []
}

MSG:

Failed to commit changes to disk: (FSError('format failed: 1'), '/dev/mapper/foo-test1')

TASK [fedora.linux_system_roles.storage : Failed message] **********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:109
Monday 20 January 2025  06:24:58 -0500 (0:00:01.522)       0:00:53.567 ******** 
fatal: [managed-node3]: FAILED! => {
    "changed": false
}

MSG:

{'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': ['e2fsprogs', 'xfsprogs', 'lvm2'], 'failed': True, 'msg': "Failed to commit changes to disk: (FSError('format failed: 1'), '/dev/mapper/foo-test1')", 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '-Fb 512', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1'}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'grow_to_fill': False, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': False, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:24:58 -0500 (0:00:00.099)       0:00:53.667 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that we failed in the role] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:23
Monday 20 January 2025  06:24:58 -0500 (0:00:00.061)       0:00:53.729 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the blivet output and error message are correct] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:28
Monday 20 January 2025  06:24:58 -0500 (0:00:00.078)       0:00:53.807 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify correct exception or error message] *******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:39
Monday 20 January 2025  06:24:58 -0500 (0:00:00.062)       0:00:53.870 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_failed_exception is defined",
    "skip_reason": "Conditional result was False"
}

TASK [Remove the volume group created above] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:87
Monday 20 January 2025  06:24:58 -0500 (0:00:00.055)       0:00:53.925 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:24:58 -0500 (0:00:00.135)       0:00:54.061 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:24:58 -0500 (0:00:00.051)       0:00:54.112 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:24:58 -0500 (0:00:00.079)       0:00:54.192 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:24:58 -0500 (0:00:00.119)       0:00:54.312 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:24:59 -0500 (0:00:00.073)       0:00:54.385 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:24:59 -0500 (0:00:00.046)       0:00:54.432 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:24:59 -0500 (0:00:00.041)       0:00:54.473 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:24:59 -0500 (0:00:00.043)       0:00:54.517 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:24:59 -0500 (0:00:00.101)       0:00:54.619 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:24:59 -0500 (0:00:00.044)       0:00:54.663 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "state": "absent",
            "type": "lvm"
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:24:59 -0500 (0:00:00.042)       0:00:54.706 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:24:59 -0500 (0:00:00.040)       0:00:54.747 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:24:59 -0500 (0:00:00.041)       0:00:54.789 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:24:59 -0500 (0:00:00.046)       0:00:54.836 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:24:59 -0500 (0:00:00.056)       0:00:54.892 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:24:59 -0500 (0:00:00.043)       0:00:54.935 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:24:59 -0500 (0:00:00.093)       0:00:55.028 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:24:59 -0500 (0:00:00.033)       0:00:55.062 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": []
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:25:01 -0500 (0:00:01.596)       0:00:56.659 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:25:01 -0500 (0:00:00.070)       0:00:56.729 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372288.405852,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "85170829302ce3d8b3d8f3031aface16e161cfd6",
        "ctime": 1737372287.8478515,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372287.8478515,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:25:01 -0500 (0:00:00.382)       0:00:57.112 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}
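
The "backup" key in this result is what lineinfile-style modules return. A hedged sketch of one way such a role fingerprint comment could be maintained (the actual role task may differ; the "# system_role:storage" marker is the one visible later in the cat /etc/fstab output):

    - name: Add a role fingerprint comment to /etc/fstab (illustrative sketch)
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"
        insertbefore: BOF
        backup: true           # yields the "backup" field seen in the result above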

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:25:02 -0500 (0:00:00.441)       0:00:57.553 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:25:02 -0500 (0:00:00.024)       0:00:57.577 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:25:02 -0500 (0:00:00.041)       0:00:57.619 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:25:02 -0500 (0:00:00.040)       0:00:57.659 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:25:02 -0500 (0:00:00.038)       0:00:57.697 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:25:02 -0500 (0:00:00.068)       0:00:57.766 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:25:02 -0500 (0:00:00.035)       0:00:57.801 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:25:02 -0500 (0:00:00.066)       0:00:57.868 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:25:02 -0500 (0:00:00.095)       0:00:57.964 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:25:02 -0500 (0:00:00.066)       0:00:58.030 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:25:03 -0500 (0:00:00.499)       0:00:58.530 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:25:03 -0500 (0:00:00.034)       0:00:58.565 ******** 
ok: [managed-node3]

TASK [Create one LVM logical volume under one volume group, size 4g] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:97
Monday 20 January 2025  06:25:04 -0500 (0:00:00.959)       0:00:59.524 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:25:04 -0500 (0:00:00.138)       0:00:59.662 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:25:04 -0500 (0:00:00.061)       0:00:59.724 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:25:04 -0500 (0:00:00.070)       0:00:59.795 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:25:04 -0500 (0:00:00.086)       0:00:59.882 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:25:04 -0500 (0:00:00.044)       0:00:59.926 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:25:04 -0500 (0:00:00.049)       0:00:59.975 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:25:04 -0500 (0:00:00.044)       0:01:00.020 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:25:04 -0500 (0:00:00.044)       0:01:00.064 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:25:04 -0500 (0:00:00.172)       0:01:00.237 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:25:04 -0500 (0:00:00.061)       0:01:00.298 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "fs_type": "ext4",
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
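
A minimal sketch of the test task that would produce the storage_pools value printed above (reconstructed from that value; parameter names follow the storage role's documented interface, not the verbatim tests_misc.yml source):

    - name: Create one LVM logical volume under one volume group, size 4g (sketch)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks:
              - sda
            volumes:
              - name: test1
                size: 4g
                fs_type: ext4
                mount_point: /opt/test1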

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:25:05 -0500 (0:00:00.053)       0:01:00.351 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:25:05 -0500 (0:00:00.053)       0:01:00.405 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:25:05 -0500 (0:00:00.053)       0:01:00.459 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:25:05 -0500 (0:00:00.043)       0:01:00.502 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:25:05 -0500 (0:00:00.047)       0:01:00.549 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:25:05 -0500 (0:00:00.045)       0:01:00.595 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:25:05 -0500 (0:00:00.127)       0:01:00.722 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:25:05 -0500 (0:00:00.036)       0:01:00.758 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        },
        {
            "action": "create device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "ext4"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/mapper/foo-test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "ext4",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "mounted"
        }
    ],
    "packages": [
        "lvm2",
        "xfsprogs",
        "e2fsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "ext4",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:25:07 -0500 (0:00:01.753)       0:01:02.512 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:25:07 -0500 (0:00:00.144)       0:01:02.657 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372288.405852,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "85170829302ce3d8b3d8f3031aface16e161cfd6",
        "ctime": 1737372287.8478515,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372287.8478515,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:25:07 -0500 (0:00:00.517)       0:01:03.174 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:25:08 -0500 (0:00:00.531)       0:01:03.705 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:25:08 -0500 (0:00:00.082)       0:01:03.788 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            },
            {
                "action": "create device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "ext4"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/mapper/foo-test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "ext4",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "mounted"
            }
        ],
        "packages": [
            "lvm2",
            "xfsprogs",
            "e2fsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "ext4",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:25:08 -0500 (0:00:00.072)       0:01:03.861 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "lvm",
                "volumes": [
                    {
                        "_device": "/dev/mapper/foo-test1",
                        "_kernel_device": "/dev/dm-0",
                        "_mount_id": "/dev/mapper/foo-test1",
                        "_raw_device": "/dev/mapper/foo-test1",
                        "_raw_kernel_device": "/dev/dm-0",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "ext4",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "lvm",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:25:08 -0500 (0:00:00.064)       0:01:03.925 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:25:08 -0500 (0:00:00.076)       0:01:04.001 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:25:08 -0500 (0:00:00.121)       0:01:04.123 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:25:09 -0500 (0:00:00.811)       0:01:04.934 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'ext4', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext4",
    "mount_info": {
        "dump": 0,
        "fstype": "ext4",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
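
As the module redirect above indicates, this step goes through ansible.posix.mount. A minimal sketch of the equivalent standalone task, assuming the mount entry shown in this result:

    - name: Mount the new volume and record it in /etc/fstab (illustrative sketch)
      ansible.posix.mount:
        src: /dev/mapper/foo-test1
        path: /opt/test1
        fstype: ext4
        opts: defaults
        state: mounted         # mounts now and adds the fstab line seen later in the log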

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:25:10 -0500 (0:00:00.457)       0:01:05.392 ******** 
skipping: [managed-node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'fstype': 'ext4', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "ext4",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:25:10 -0500 (0:00:00.091)       0:01:05.483 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:25:10 -0500 (0:00:00.807)       0:01:06.290 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:25:11 -0500 (0:00:00.521)       0:01:06.812 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:25:11 -0500 (0:00:00.082)       0:01:06.894 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:113
Monday 20 January 2025  06:25:12 -0500 (0:00:00.994)       0:01:07.888 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Monday 20 January 2025  06:25:12 -0500 (0:00:00.087)       0:01:07.976 ******** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "lvm",
            "volumes": [
                {
                    "_device": "/dev/mapper/foo-test1",
                    "_kernel_device": "/dev/dm-0",
                    "_mount_id": "/dev/mapper/foo-test1",
                    "_raw_device": "/dev/mapper/foo-test1",
                    "_raw_kernel_device": "/dev/dm-0",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "ext4",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "lvm",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Monday 20 January 2025  06:25:12 -0500 (0:00:00.078)       0:01:08.055 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Monday 20 January 2025  06:25:12 -0500 (0:00:00.067)       0:01:08.122 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/foo-test1": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/mapper/foo-test1",
            "size": "4G",
            "type": "lvm",
            "uuid": "30a1a519-8e18-445d-ab70-4cf077e11b65"
        },
        "/dev/sda": {
            "fstype": "LVM2_member",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "1G9OhW-OWbB-QmKu-CsSp-xBQ6-Fp2S-UzHkhe"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "67db83fb-317b-4d3f-873e-b15c4c94e41e"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Monday 20 January 2025  06:25:13 -0500 (0:00:00.382)       0:01:08.505 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002729",
    "end": "2025-01-20 06:25:13.504119",
    "rc": 0,
    "start": "2025-01-20 06:25:13.501390"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:17 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=67db83fb-317b-4d3f-873e-b15c4c94e41e /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
/dev/mapper/foo-test1 /opt/test1 ext4 defaults 0 0
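
A hedged sketch of how a verification step could assert that exactly one fstab entry exists for the new volume, based on the output above (the register name is hypothetical; the actual checks live in verify-role-results.yml and may differ):

    - name: Read /etc/fstab                         # mirrors the task above
      ansible.builtin.command: cat /etc/fstab
      register: storage_test_fstab                  # hypothetical variable name
      changed_when: false

    - name: Assert exactly one fstab entry exists for the new volume
      ansible.builtin.assert:
        that:
          - storage_test_fstab.stdout_lines | select('search', '^/dev/mapper/foo-test1 /opt/test1 ') | list | length == 1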

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Monday 20 January 2025  06:25:13 -0500 (0:00:00.408)       0:01:08.913 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002917",
    "end": "2025-01-20 06:25:13.904182",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-20 06:25:13.901265"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Monday 20 January 2025  06:25:13 -0500 (0:00:00.386)       0:01:09.300 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Monday 20 January 2025  06:25:14 -0500 (0:00:00.220)       0:01:09.521 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Monday 20 January 2025  06:25:14 -0500 (0:00:00.031)       0:01:09.552 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "vgs",
        "--noheadings",
        "--binary",
        "-o",
        "shared",
        "foo"
    ],
    "delta": "0:00:00.027488",
    "end": "2025-01-20 06:25:14.551063",
    "rc": 0,
    "start": "2025-01-20 06:25:14.523575"
}

STDOUT:

        0

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Monday 20 January 2025  06:25:14 -0500 (0:00:00.380)       0:01:09.933 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
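
A minimal sketch of this check as standalone tasks, assuming the vgs command shown above (shared was requested as false, so the binary output should be 0; the register name is hypothetical):

    - name: Get VG shared value status              # same command as in the log above
      ansible.builtin.command: vgs --noheadings --binary -o shared foo
      register: vg_shared_status                    # hypothetical variable name
      changed_when: false

    - name: Verify that VG shared value checks out
      ansible.builtin.assert:
        that:
          - (vg_shared_status.stdout | trim) == '0'   # shared: false -> binary value 0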

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Monday 20 January 2025  06:25:14 -0500 (0:00:00.030)       0:01:09.963 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Monday 20 January 2025  06:25:14 -0500 (0:00:00.048)       0:01:10.011 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_count": "1",
        "_storage_test_pool_pvs_lvm": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Monday 20 January 2025  06:25:14 -0500 (0:00:00.053)       0:01:10.065 ******** 
ok: [managed-node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "device": "/dev/sda",
    "pv": "/dev/sda"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Monday 20 January 2025  06:25:15 -0500 (0:00:00.370)       0:01:10.436 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": "1"
    },
    "changed": false
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Monday 20 January 2025  06:25:15 -0500 (0:00:00.072)       0:01:10.509 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_pool_pvs": [
            "/dev/sda"
        ]
    },
    "changed": false
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Monday 20 January 2025  06:25:15 -0500 (0:00:00.103)       0:01:10.613 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
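
Most tasks in this member-verification file follow the same shape as the three above: derive a fact from previously gathered LVM information, then assert it against the expected value. Roughly (a sketch with the derivation simplified; the variable names are the ones visible in the log):

    - name: Set pvs lvm length
      ansible.builtin.set_fact:
        __pvs_lvm_len: "{{ _storage_test_pool_pvs_lvm | length }}"

    - name: Verify PV count
      ansible.builtin.assert:
        that:
          - __pvs_lvm_len | int == _storage_test_expected_pv_count | int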

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Monday 20 January 2025  06:25:15 -0500 (0:00:00.104)       0:01:10.717 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Monday 20 January 2025  06:25:15 -0500 (0:00:00.065)       0:01:10.782 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_pv_type": "disk"
    },
    "changed": false
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Monday 20 January 2025  06:25:15 -0500 (0:00:00.079)       0:01:10.862 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Monday 20 January 2025  06:25:15 -0500 (0:00:00.062)       0:01:10.924 ******** 
ok: [managed-node3] => (item=/dev/sda) => {
    "ansible_loop_var": "pv",
    "changed": false,
    "pv": "/dev/sda"
}

MSG:

All assertions passed

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Monday 20 January 2025  06:25:15 -0500 (0:00:00.062)       0:01:10.987 ******** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDOUT:


** (process:317887): WARNING **: 06:25:15.924: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.9.60 originally 10.31.9.60
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.9.60 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.9.60 originally 10.31.9.60
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.9.60 closed.
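
The rc=0 above is what the test cares about: it indicates that the installed blivet can grow a PV to fill its device (the nvme plugin warning is unrelated noise from blivet loading optional plugins, and the SSH debug lines are verbose connection output). A plausible sketch of such a capability probe, assuming it simply tests for a grow_to_fill attribute on blivet's LVMPhysicalVolume format (the real test may probe differently):

    - name: Check that blivet supports PV grow to fill
      ansible.builtin.command:
        cmd: >-
          python3 -c 'from blivet.formats.lvmpv import LVMPhysicalVolume;
          import sys; sys.exit(0 if hasattr(LVMPhysicalVolume, "grow_to_fill") else 1)'
      register: blivet_pv_grow      # illustrative register name
      changed_when: false
      failed_when: false            # only the rc is of interest here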


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Monday 20 January 2025  06:25:16 -0500 (0:00:00.415)       0:01:11.402 ******** 
skipping: [managed-node3] => (item=/dev/sda)  => {
    "ansible_loop_var": "st_pool_pv",
    "changed": false,
    "false_condition": "storage_test_pool.grow_to_fill | bool",
    "skip_reason": "Conditional result was False",
    "st_pool_pv": "/dev/sda"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Monday 20 January 2025  06:25:16 -0500 (0:00:00.091)       0:01:11.494 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Monday 20 January 2025  06:25:16 -0500 (0:00:00.102)       0:01:11.597 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Monday 20 January 2025  06:25:16 -0500 (0:00:00.050)       0:01:11.648 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Monday 20 January 2025  06:25:16 -0500 (0:00:00.038)       0:01:11.686 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Monday 20 January 2025  06:25:16 -0500 (0:00:00.055)       0:01:11.742 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Monday 20 January 2025  06:25:16 -0500 (0:00:00.034)       0:01:11.776 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Monday 20 January 2025  06:25:16 -0500 (0:00:00.049)       0:01:11.825 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Monday 20 January 2025  06:25:16 -0500 (0:00:00.047)       0:01:11.873 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Monday 20 January 2025  06:25:16 -0500 (0:00:00.036)       0:01:11.909 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Monday 20 January 2025  06:25:16 -0500 (0:00:00.034)       0:01:11.944 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Monday 20 January 2025  06:25:16 -0500 (0:00:00.036)       0:01:11.980 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Monday 20 January 2025  06:25:16 -0500 (0:00:00.044)       0:01:12.025 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Monday 20 January 2025  06:25:16 -0500 (0:00:00.073)       0:01:12.098 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Monday 20 January 2025  06:25:16 -0500 (0:00:00.089)       0:01:12.187 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:8
Monday 20 January 2025  06:25:16 -0500 (0:00:00.082)       0:01:12.270 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:16
Monday 20 January 2025  06:25:16 -0500 (0:00:00.049)       0:01:12.320 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:20
Monday 20 January 2025  06:25:17 -0500 (0:00:00.058)       0:01:12.379 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV stripe size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:27
Monday 20 January 2025  06:25:17 -0500 (0:00:00.042)       0:01:12.421 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested stripe size] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:31
Monday 20 January 2025  06:25:17 -0500 (0:00:00.041)       0:01:12.463 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected stripe size] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:37
Monday 20 January 2025  06:25:17 -0500 (0:00:00.046)       0:01:12.509 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check stripe size] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-lvmraid.yml:42
Monday 20 January 2025  06:25:17 -0500 (0:00:00.043)       0:01:12.553 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_lvmraid_volume.raid_level is not none",
    "skip_reason": "Conditional result was False"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Monday 20 January 2025  06:25:17 -0500 (0:00:00.043)       0:01:12.597 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Monday 20 January 2025  06:25:17 -0500 (0:00:00.071)       0:01:12.668 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about thinpool] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:8
Monday 20 January 2025  06:25:17 -0500 (0:00:00.062)       0:01:12.730 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in correct thinpool (when thinp name is provided)] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:16
Monday 20 January 2025  06:25:17 -0500 (0:00:00.034)       0:01:12.765 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Check that volume is in thinpool (when thinp name is not provided)] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:22
Monday 20 January 2025  06:25:17 -0500 (0:00:00.034)       0:01:12.800 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_thin_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-thin.yml:26
Monday 20 January 2025  06:25:17 -0500 (0:00:00.032)       0:01:12.833 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_thin_status": null
    },
    "changed": false
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Monday 20 January 2025  06:25:17 -0500 (0:00:00.038)       0:01:12.871 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Monday 20 January 2025  06:25:17 -0500 (0:00:00.094)       0:01:12.965 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Monday 20 January 2025  06:25:17 -0500 (0:00:00.085)       0:01:13.050 ******** 
skipping: [managed-node3] => (item=/dev/sda)  => {
    "_storage_test_pool_member_path": "/dev/sda",
    "ansible_loop_var": "_storage_test_pool_member_path",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Monday 20 January 2025  06:25:17 -0500 (0:00:00.058)       0:01:13.109 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml for managed-node3 => (item=/dev/sda)

TASK [Set variables used by tests] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:2
Monday 20 January 2025  06:25:17 -0500 (0:00:00.085)       0:01:13.195 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": []
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:6
Monday 20 January 2025  06:25:17 -0500 (0:00:00.095)       0:01:13.290 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
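
The crypttab pattern used here (and repeated later for the volume-level checks) is: collect the /etc/crypttab lines that reference the member device, then assert that the number of matches equals the expected entry count (0 for this unencrypted pool). A minimal sketch, assuming the crypttab contents were previously read into a storage_test_crypttab result (a name used only for illustration):

    - name: Set variables used by tests
      ansible.builtin.set_fact:
        _storage_test_crypttab_entries: "{{ storage_test_crypttab.stdout_lines
          | select('search', _storage_test_pool_member_path) | list }}"

    - name: Check for /etc/crypttab entry
      ansible.builtin.assert:
        that:
          - _storage_test_crypttab_entries | length ==
            _storage_test_expected_crypttab_entries | int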

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:14
Monday 20 January 2025  06:25:18 -0500 (0:00:00.119)       0:01:13.409 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:23
Monday 20 January 2025  06:25:18 -0500 (0:00:00.081)       0:01:13.491 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "false and _storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:32
Monday 20 January 2025  06:25:18 -0500 (0:00:00.089)       0:01:13.581 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-crypttab.yml:41
Monday 20 January 2025  06:25:18 -0500 (0:00:00.064)       0:01:13.645 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null
    },
    "changed": false
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Monday 20 January 2025  06:25:18 -0500 (0:00:00.039)       0:01:13.685 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Monday 20 January 2025  06:25:18 -0500 (0:00:00.093)       0:01:13.778 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Monday 20 January 2025  06:25:18 -0500 (0:00:00.091)       0:01:13.870 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Get information about VDO deduplication] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:8
Monday 20 January 2025  06:25:18 -0500 (0:00:00.113)       0:01:13.983 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is off] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:15
Monday 20 January 2025  06:25:18 -0500 (0:00:00.043)       0:01:14.027 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO deduplication is on] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:21
Monday 20 January 2025  06:25:18 -0500 (0:00:00.041)       0:01:14.068 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about VDO compression] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:27
Monday 20 January 2025  06:25:18 -0500 (0:00:00.051)       0:01:14.120 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is off] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:34
Monday 20 January 2025  06:25:18 -0500 (0:00:00.044)       0:01:14.164 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check if VDO compression is on] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:40
Monday 20 January 2025  06:25:18 -0500 (0:00:00.037)       0:01:14.201 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_vdo_volume.deduplication != none or storage_test_vdo_volume.compression != none",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-member-vdo.yml:46
Monday 20 January 2025  06:25:18 -0500 (0:00:00.032)       0:01:14.234 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_vdo_status": null
    },
    "changed": false
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Monday 20 January 2025  06:25:18 -0500 (0:00:00.035)       0:01:14.270 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Monday 20 January 2025  06:25:19 -0500 (0:00:00.089)       0:01:14.359 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Monday 20 January 2025  06:25:19 -0500 (0:00:00.055)       0:01:14.415 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Monday 20 January 2025  06:25:19 -0500 (0:00:00.041)       0:01:14.457 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Monday 20 January 2025  06:25:19 -0500 (0:00:00.042)       0:01:14.499 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Monday 20 January 2025  06:25:19 -0500 (0:00:00.038)       0:01:14.538 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Monday 20 January 2025  06:25:19 -0500 (0:00:00.042)       0:01:14.580 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Monday 20 January 2025  06:25:19 -0500 (0:00:00.040)       0:01:14.621 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Monday 20 January 2025  06:25:19 -0500 (0:00:00.041)       0:01:14.663 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/mapper/foo-test1', '_raw_device': '/dev/mapper/foo-test1', '_mount_id': '/dev/mapper/foo-test1', '_kernel_device': '/dev/dm-0', '_raw_kernel_device': '/dev/dm-0'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Monday 20 January 2025  06:25:19 -0500 (0:00:00.061)       0:01:14.724 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Monday 20 January 2025  06:25:19 -0500 (0:00:00.072)       0:01:14.796 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Monday 20 January 2025  06:25:19 -0500 (0:00:00.267)       0:01:15.064 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/mapper/foo-test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Monday 20 January 2025  06:25:19 -0500 (0:00:00.085)       0:01:15.149 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Monday 20 January 2025  06:25:19 -0500 (0:00:00.099)       0:01:15.249 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Monday 20 January 2025  06:25:19 -0500 (0:00:00.054)       0:01:15.303 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
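
The mount-state assertion above compares the expected device path and mount point against the mount facts gathered earlier. Roughly (a sketch; the test's actual assertion text is not printed in the log):

    - name: Verify the current mount state by device
      ansible.builtin.assert:
        that:
          - >-
            ansible_facts.mounts
            | selectattr('device', 'equalto', storage_test_device_path)
            | selectattr('mount', 'equalto', storage_test_mount_expected_mount_point)
            | list | length == 1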

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Monday 20 January 2025  06:25:20 -0500 (0:00:00.051)       0:01:15.355 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Monday 20 January 2025  06:25:20 -0500 (0:00:00.040)       0:01:15.395 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Monday 20 January 2025  06:25:20 -0500 (0:00:00.034)       0:01:15.430 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Monday 20 January 2025  06:25:20 -0500 (0:00:00.035)       0:01:15.465 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Monday 20 January 2025  06:25:20 -0500 (0:00:00.033)       0:01:15.499 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Monday 20 January 2025  06:25:20 -0500 (0:00:00.034)       0:01:15.534 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Monday 20 January 2025  06:25:20 -0500 (0:00:00.047)       0:01:15.581 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Monday 20 January 2025  06:25:20 -0500 (0:00:00.048)       0:01:15.629 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "/dev/mapper/foo-test1 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 ext4 defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
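
The facts above show the fstab-verification pattern: grep-style matches against /etc/fstab are collected into lists, and each of the assertions that follow compares a list length with its expected count. A compressed sketch (assuming the fstab contents are available as storage_test_fstab, and showing only the device-identifier match):

    - name: Set some variables for fstab checking
      ansible.builtin.set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines
          | select('search', storage_test_volume._mount_id ~ ' ') | list }}"
        storage_test_fstab_expected_id_matches: "{{ 1 if _storage_test_volume_present else 0 }}"

    - name: Verify that the device identifier appears in /etc/fstab
      ansible.builtin.assert:
        that:
          - storage_test_fstab_id_matches | length ==
            storage_test_fstab_expected_id_matches | int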

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Monday 20 January 2025  06:25:20 -0500 (0:00:00.157)       0:01:15.787 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Monday 20 January 2025  06:25:20 -0500 (0:00:00.124)       0:01:15.911 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Monday 20 January 2025  06:25:20 -0500 (0:00:00.089)       0:01:16.000 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Monday 20 January 2025  06:25:20 -0500 (0:00:00.094)       0:01:16.095 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Monday 20 January 2025  06:25:20 -0500 (0:00:00.118)       0:01:16.213 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Monday 20 January 2025  06:25:20 -0500 (0:00:00.047)       0:01:16.261 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Monday 20 January 2025  06:25:21 -0500 (0:00:00.080)       0:01:16.341 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Monday 20 January 2025  06:25:21 -0500 (0:00:00.073)       0:01:16.415 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372307.0428734,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737372307.0428734,
        "dev": 5,
        "device_type": 64768,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 9853,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1737372307.0428734,
        "nlink": 1,
        "path": "/dev/mapper/foo-test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Monday 20 January 2025  06:25:21 -0500 (0:00:00.462)       0:01:16.878 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
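
The stat/assert pair above is the standard presence check for the volume's device node: stat the volume's _device path and, since the volume is expected to be present, assert that a block device exists there. A minimal sketch, assuming follow: true (which would explain islnk: false above for what is normally a /dev/mapper symlink):

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"
        follow: true
      register: storage_test_dev    # illustrative register name

    - name: Verify the presence/absence of the device node
      ansible.builtin.assert:
        that:
          - storage_test_dev.stat.exists and storage_test_dev.stat.isblk
      when: _storage_test_volume_present | bool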

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Monday 20 January 2025  06:25:21 -0500 (0:00:00.064)       0:01:16.942 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Monday 20 January 2025  06:25:21 -0500 (0:00:00.057)       0:01:17.000 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Monday 20 January 2025  06:25:21 -0500 (0:00:00.048)       0:01:17.049 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "lvm"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Monday 20 January 2025  06:25:21 -0500 (0:00:00.039)       0:01:17.089 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Monday 20 January 2025  06:25:21 -0500 (0:00:00.029)       0:01:17.118 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Monday 20 January 2025  06:25:21 -0500 (0:00:00.051)       0:01:17.170 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Monday 20 January 2025  06:25:21 -0500 (0:00:00.044)       0:01:17.215 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
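
"Nothing to do" simply means the package manager found cryptsetup already installed; the task amounts to an idempotent package install along the lines of:

    - name: Ensure cryptsetup is present   # needed for the LUKS checks that follow
      ansible.builtin.package:
        name: cryptsetup
        state: present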

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Monday 20 January 2025  06:25:23 -0500 (0:00:01.423)       0:01:18.638 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Monday 20 January 2025  06:25:23 -0500 (0:00:00.040)       0:01:18.678 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Monday 20 January 2025  06:25:23 -0500 (0:00:00.035)       0:01:18.714 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
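
For an unencrypted volume the LUKS checks reduce to the assertion above: the raw device and the presented device must be the same node. A sketch:

    - name: Verify that the raw device is the same as the device if not encrypted
      ansible.builtin.assert:
        that:
          - storage_test_volume._raw_device == storage_test_volume._device
      when: not storage_test_volume.encryption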

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Monday 20 January 2025  06:25:23 -0500 (0:00:00.081)       0:01:18.795 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Monday 20 January 2025  06:25:23 -0500 (0:00:00.037)       0:01:18.833 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Monday 20 January 2025  06:25:23 -0500 (0:00:00.037)       0:01:18.870 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Monday 20 January 2025  06:25:23 -0500 (0:00:00.104)       0:01:18.975 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Monday 20 January 2025  06:25:23 -0500 (0:00:00.041)       0:01:19.017 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Monday 20 January 2025  06:25:23 -0500 (0:00:00.036)       0:01:19.054 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Monday 20 January 2025  06:25:23 -0500 (0:00:00.084)       0:01:19.138 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Monday 20 January 2025  06:25:23 -0500 (0:00:00.074)       0:01:19.213 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Monday 20 January 2025  06:25:23 -0500 (0:00:00.085)       0:01:19.298 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Monday 20 January 2025  06:25:24 -0500 (0:00:00.085)       0:01:19.383 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Monday 20 January 2025  06:25:24 -0500 (0:00:00.071)       0:01:19.455 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Monday 20 January 2025  06:25:24 -0500 (0:00:00.041)       0:01:19.497 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Monday 20 January 2025  06:25:24 -0500 (0:00:00.036)       0:01:19.534 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Monday 20 January 2025  06:25:24 -0500 (0:00:00.036)       0:01:19.570 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Monday 20 January 2025  06:25:24 -0500 (0:00:00.033)       0:01:19.604 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Monday 20 January 2025  06:25:24 -0500 (0:00:00.041)       0:01:19.645 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Monday 20 January 2025  06:25:24 -0500 (0:00:00.038)       0:01:19.683 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Monday 20 January 2025  06:25:24 -0500 (0:00:00.037)       0:01:19.720 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Monday 20 January 2025  06:25:24 -0500 (0:00:00.036)       0:01:19.757 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Monday 20 January 2025  06:25:24 -0500 (0:00:00.036)       0:01:19.793 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Monday 20 January 2025  06:25:24 -0500 (0:00:00.036)       0:01:19.830 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Monday 20 January 2025  06:25:24 -0500 (0:00:00.040)       0:01:19.870 ******** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
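
The bytes/lvm/parted/size fields above come from the test's size-parsing helper. As a rough stand-in using stock Ansible filters (an illustration only; these filters interpret "GB" with base 1024, which is why "4 GB" maps to 4294967296 bytes):

    - name: Convert between human-readable sizes and bytes (illustrative)
      ansible.builtin.debug:
        msg:
          - "{{ '4 GB' | ansible.builtin.human_to_bytes }}"      # 4294967296
          - "{{ 4294967296 | ansible.builtin.human_readable }}"  # "4.00 GB"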

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Monday 20 January 2025  06:25:24 -0500 (0:00:00.423)       0:01:20.294 ******** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Monday 20 January 2025  06:25:25 -0500 (0:00:00.437)       0:01:20.732 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_expected_size": "4294967296"
    },
    "changed": false
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Monday 20 January 2025  06:25:25 -0500 (0:00:00.056)       0:01:20.788 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Monday 20 January 2025  06:25:25 -0500 (0:00:00.029)       0:01:20.818 ******** 
ok: [managed-node3] => {
    "bytes": 10726680821,
    "changed": false,
    "lvm": "9g",
    "parted": "9GiB",
    "size": "9 GiB"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Monday 20 January 2025  06:25:25 -0500 (0:00:00.392)       0:01:21.211 ******** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Monday 20 January 2025  06:25:25 -0500 (0:00:00.098)       0:01:21.310 ******** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Monday 20 January 2025  06:25:26 -0500 (0:00:00.104)       0:01:21.414 ******** 
skipping: [managed-node3] => {
    "false_condition": "'%' in storage_test_volume.size | string"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Monday 20 January 2025  06:25:26 -0500 (0:00:00.096)       0:01:21.510 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'%' in storage_test_volume.size | string",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Monday 20 January 2025  06:25:26 -0500 (0:00:00.074)       0:01:21.585 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Monday 20 January 2025  06:25:26 -0500 (0:00:00.030)       0:01:21.616 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Monday 20 January 2025  06:25:26 -0500 (0:00:00.028)       0:01:21.644 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Monday 20 January 2025  06:25:26 -0500 (0:00:00.029)       0:01:21.674 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Monday 20 January 2025  06:25:26 -0500 (0:00:00.021)       0:01:21.695 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Monday 20 January 2025  06:25:26 -0500 (0:00:00.021)       0:01:21.717 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Monday 20 January 2025  06:25:26 -0500 (0:00:00.021)       0:01:21.738 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Monday 20 January 2025  06:25:26 -0500 (0:00:00.021)       0:01:21.760 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Monday 20 January 2025  06:25:26 -0500 (0:00:00.021)       0:01:21.781 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Monday 20 January 2025  06:25:26 -0500 (0:00:00.023)       0:01:21.804 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Monday 20 January 2025  06:25:26 -0500 (0:00:00.021)       0:01:21.826 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Monday 20 January 2025  06:25:26 -0500 (0:00:00.021)       0:01:21.848 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Monday 20 January 2025  06:25:26 -0500 (0:00:00.025)       0:01:21.873 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Monday 20 January 2025  06:25:26 -0500 (0:00:00.034)       0:01:21.908 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Monday 20 January 2025  06:25:26 -0500 (0:00:00.034)       0:01:21.942 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Monday 20 January 2025  06:25:26 -0500 (0:00:00.037)       0:01:21.980 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Monday 20 January 2025  06:25:26 -0500 (0:00:00.032)       0:01:22.012 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Monday 20 January 2025  06:25:26 -0500 (0:00:00.031)       0:01:22.044 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
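
The comparison above boils down to checking that the parsed on-disk size equals the expected size established earlier. A sketch of such an assertion using the variables shown above (illustrative, not necessarily the exact task in test-verify-volume-size.yml):

    - name: Assert expected size is actual size (illustrative sketch)
      ansible.builtin.assert:
        that:
          - storage_test_actual_size.bytes == storage_test_expected_size | int
        msg: actual volume size does not match the expected size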

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Monday 20 January 2025  06:25:26 -0500 (0:00:00.060)       0:01:22.104 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "lvs",
        "--noheadings",
        "--nameprefixes",
        "--units=b",
        "--nosuffix",
        "--unquoted",
        "-o",
        "name,attr,cache_total_blocks,chunk_size,segtype",
        "foo/test1"
    ],
    "delta": "0:00:00.028329",
    "end": "2025-01-20 06:25:27.101592",
    "rc": 0,
    "start": "2025-01-20 06:25:27.073263"
}

STDOUT:

  LVM2_LV_NAME=test1 LVM2_LV_ATTR=-wi-ao---- LVM2_CACHE_TOTAL_BLOCKS= LVM2_CHUNK_SIZE=0 LVM2_SEGTYPE=linear
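
The query above asks lvs for name-prefixed, byte-unit output so each field can be parsed as a shell-style variable. The same command could be reproduced as a standalone task (a sketch; lvs_out is an assumed register name):

    - name: Query LV attributes, cache blocks, chunk size and segment type (sketch)
      ansible.builtin.command:
        argv:
          - lvs
          - --noheadings
          - --nameprefixes
          - --units=b
          - --nosuffix
          - --unquoted
          - -o
          - name,attr,cache_total_blocks,chunk_size,segtype
          - foo/test1
      register: lvs_out
      changed_when: false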

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Monday 20 January 2025  06:25:27 -0500 (0:00:00.403)       0:01:22.508 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_lv_segtype": [
            "linear"
        ]
    },
    "changed": false
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Monday 20 January 2025  06:25:27 -0500 (0:00:00.080)       0:01:22.588 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Monday 20 January 2025  06:25:27 -0500 (0:00:00.089)       0:01:22.677 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Monday 20 January 2025  06:25:27 -0500 (0:00:00.066)       0:01:22.743 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Monday 20 January 2025  06:25:27 -0500 (0:00:00.079)       0:01:22.823 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Monday 20 January 2025  06:25:27 -0500 (0:00:00.072)       0:01:22.895 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.cached | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Monday 20 January 2025  06:25:27 -0500 (0:00:00.080)       0:01:22.976 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Monday 20 January 2025  06:25:27 -0500 (0:00:00.040)       0:01:23.016 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Monday 20 January 2025  06:25:27 -0500 (0:00:00.039)       0:01:23.056 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Test for correct handling resize large size] *****************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:116
Monday 20 January 2025  06:25:27 -0500 (0:00:00.040)       0:01:23.096 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml for managed-node3

TASK [Store global variable value copy] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:4
Monday 20 January 2025  06:25:27 -0500 (0:00:00.088)       0:01:23.184 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_pools_global": [],
        "storage_safe_mode_global": false,
        "storage_volumes_global": []
    },
    "changed": false
}

TASK [Verify role raises correct error] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:10
Monday 20 January 2025  06:25:27 -0500 (0:00:00.081)       0:01:23.266 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:25:28 -0500 (0:00:00.066)       0:01:23.333 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:25:28 -0500 (0:00:00.059)       0:01:23.392 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:25:28 -0500 (0:00:00.076)       0:01:23.469 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:25:28 -0500 (0:00:00.181)       0:01:23.650 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:25:28 -0500 (0:00:00.181)       0:01:23.831 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:25:28 -0500 (0:00:00.070)       0:01:23.902 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:25:28 -0500 (0:00:00.069)       0:01:23.972 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:25:28 -0500 (0:00:00.035)       0:01:24.007 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:25:28 -0500 (0:00:00.090)       0:01:24.098 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:25:28 -0500 (0:00:00.039)       0:01:24.138 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "type": "lvm",
            "volumes": [
                {
                    "fs_type": "ext4",
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "12884901888.0"
                }
            ]
        }
    ]
}
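
The storage_pools value shown above asks the role to resize the existing 4 GiB ext4 volume test1 to 12884901888.0 bytes (12 GiB), which exceeds the roughly 9 GiB pool and is expected to fail. Reproducing those inputs as a role invocation might look like this (a sketch; whether the test uses include_role or a play-level role is an assumption):

    - name: Attempt to grow test1 beyond the pool capacity (sketch of the inputs above)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            volumes:
              - name: test1
                fs_type: ext4
                mount_point: /opt/test1
                size: "12884901888.0"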

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:25:28 -0500 (0:00:00.057)       0:01:24.195 ******** 
ok: [managed-node3] => {
    "storage_volumes": []
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:25:28 -0500 (0:00:00.052)       0:01:24.248 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:25:28 -0500 (0:00:00.027)       0:01:24.276 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:25:28 -0500 (0:00:00.027)       0:01:24.303 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:25:29 -0500 (0:00:00.041)       0:01:24.345 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:25:29 -0500 (0:00:00.054)       0:01:24.399 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:25:29 -0500 (0:00:00.111)       0:01:24.511 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:25:29 -0500 (0:00:00.048)       0:01:24.559 ******** 
fatal: [managed-node3]: FAILED! => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

MSG:

volume 'test1' cannot be resized to '12 GiB'
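
This is the expected failure for this scenario: blivet refuses to grow test1 to 12 GiB because the pool only provides about 9 GiB. The verify-role-failed.yml include referenced above then confirms the role failed for the right reason; one common way to express that kind of check (a sketch assuming a block/rescue pattern, not necessarily how that file is written) is:

    - name: Verify the role fails with the expected error (illustrative pattern)
      block:
        - name: Run the role with the oversized volume
          ansible.builtin.include_role:
            name: fedora.linux_system_roles.storage
        - name: Fail if the role unexpectedly succeeded
          ansible.builtin.fail:
            msg: the role was expected to fail but did not
      rescue:
        - name: Check the failure message
          ansible.builtin.assert:
            that:
              - "'cannot be resized' in ansible_failed_result.msg"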

TASK [fedora.linux_system_roles.storage : Failed message] **********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:109
Monday 20 January 2025  06:25:30 -0500 (0:00:01.343)       0:01:25.903 ******** 
fatal: [managed-node3]: FAILED! => {
    "changed": false
}

MSG:

{'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'msg': "volume 'test1' cannot be resized to '12 GiB'", 'invocation': {'module_args': {'pools': [{'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'lvm', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '12884901888.0', 'state': 'present', 'type': 'lvm', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None}]}], 'volumes': [], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'grow_to_fill': False, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': False, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:25:30 -0500 (0:00:00.068)       0:01:25.971 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that we failed in the role] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:23
Monday 20 January 2025  06:25:30 -0500 (0:00:00.021)       0:01:25.993 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the blivet output and error message are correct] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:28
Monday 20 January 2025  06:25:30 -0500 (0:00:00.038)       0:01:26.031 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify correct exception or error message] *******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:39
Monday 20 January 2025  06:25:30 -0500 (0:00:00.050)       0:01:26.081 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_failed_exception is defined",
    "skip_reason": "Conditional result was False"
}

TASK [Remove the volume group created above] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:134
Monday 20 January 2025  06:25:30 -0500 (0:00:00.040)       0:01:26.122 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:25:30 -0500 (0:00:00.082)       0:01:26.204 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:25:30 -0500 (0:00:00.031)       0:01:26.235 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:25:30 -0500 (0:00:00.047)       0:01:26.283 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:25:31 -0500 (0:00:00.069)       0:01:26.354 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:25:31 -0500 (0:00:00.043)       0:01:26.398 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:25:31 -0500 (0:00:00.044)       0:01:26.442 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:25:31 -0500 (0:00:00.039)       0:01:26.481 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:25:31 -0500 (0:00:00.038)       0:01:26.520 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:25:31 -0500 (0:00:00.089)       0:01:26.609 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:25:31 -0500 (0:00:00.041)       0:01:26.651 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "state": "absent",
            "type": "lvm"
        }
    ]
}
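
Here the same pool is passed back to the role with state absent, which tears down the LV, the VG and the PV signature on sda (the destroy actions appear in the blivet output below). A sketch of that invocation using the values shown above (again assuming include_role as the entry point):

    - name: Remove the volume group created above (sketch of the inputs shown)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: lvm
            disks:
              - sda
            state: absent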

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:25:31 -0500 (0:00:00.045)       0:01:26.697 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:25:31 -0500 (0:00:00.061)       0:01:26.758 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:25:31 -0500 (0:00:00.047)       0:01:26.806 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:25:31 -0500 (0:00:00.047)       0:01:26.854 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:25:31 -0500 (0:00:00.055)       0:01:26.909 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:25:31 -0500 (0:00:00.073)       0:01:26.983 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:25:31 -0500 (0:00:00.118)       0:01:27.102 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:25:31 -0500 (0:00:00.047)       0:01:27.150 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/mapper/foo-test1",
            "fs_type": "ext4"
        },
        {
            "action": "destroy device",
            "device": "/dev/mapper/foo-test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "lvmpv"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "fstype": "ext4",
            "path": "/opt/test1",
            "src": "/dev/mapper/foo-test1",
            "state": "absent"
        }
    ],
    "packages": [
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "lvm",
            "volumes": []
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:25:33 -0500 (0:00:01.745)       0:01:28.895 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:25:33 -0500 (0:00:00.067)       0:01:28.963 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372309.9908767,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "c32bf202c7b2a23432ca8d6114913e6720bfb078",
        "ctime": 1737372309.9878767,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372309.9878767,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1417,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:25:34 -0500 (0:00:00.439)       0:01:29.402 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:25:34 -0500 (0:00:00.513)       0:01:29.915 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:25:34 -0500 (0:00:00.058)       0:01:29.974 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/mapper/foo-test1",
                "fs_type": "ext4"
            },
            {
                "action": "destroy device",
                "device": "/dev/mapper/foo-test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "lvmpv"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "fstype": "ext4",
                "path": "/opt/test1",
                "src": "/dev/mapper/foo-test1",
                "state": "absent"
            }
        ],
        "packages": [
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:25:34 -0500 (0:00:00.080)       0:01:30.054 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "lvm",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:25:34 -0500 (0:00:00.050)       0:01:30.105 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:25:34 -0500 (0:00:00.054)       0:01:30.159 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': '/dev/mapper/foo-test1', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'ext4'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext4",
    "mount_info": {
        "fstype": "ext4",
        "path": "/opt/test1",
        "src": "/dev/mapper/foo-test1",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "/dev/mapper/foo-test1"
}
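
The loop item above shows the stale fstab entry being removed via the redirected ansible.posix.mount module. An equivalent standalone task with the same parameters (illustrative):

    - name: Remove the obsolete /opt/test1 mount and fstab entry (illustrative)
      ansible.posix.mount:
        path: /opt/test1
        src: /dev/mapper/foo-test1
        fstype: ext4
        state: absent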

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:25:35 -0500 (0:00:00.442)       0:01:30.602 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
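
Annotation (assumption about the role's internals, shown for illustration): a result of name: null / status: {} is what a daemon_reload-only call to the systemd module returns, so the task is roughly equivalent to:

    - name: Tell systemd to refresh its view of /etc/fstab   # sketch only
      ansible.builtin.systemd:
        daemon_reload: true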

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:25:35 -0500 (0:00:00.683)       0:01:31.285 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:25:36 -0500 (0:00:00.065)       0:01:31.350 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:25:36 -0500 (0:00:00.051)       0:01:31.402 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:25:36 -0500 (0:00:00.738)       0:01:32.140 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:25:37 -0500 (0:00:00.487)       0:01:32.627 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:25:37 -0500 (0:00:00.052)       0:01:32.680 ******** 
ok: [managed-node3]

TASK [Create one partition on one disk] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:144
Monday 20 January 2025  06:25:38 -0500 (0:00:00.958)       0:01:33.639 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:25:38 -0500 (0:00:00.190)       0:01:33.830 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:25:38 -0500 (0:00:00.053)       0:01:33.883 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:25:38 -0500 (0:00:00.072)       0:01:33.955 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:25:38 -0500 (0:00:00.089)       0:01:34.045 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:25:38 -0500 (0:00:00.048)       0:01:34.094 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:25:38 -0500 (0:00:00.057)       0:01:34.151 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:25:38 -0500 (0:00:00.038)       0:01:34.189 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:25:38 -0500 (0:00:00.040)       0:01:34.230 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:25:38 -0500 (0:00:00.083)       0:01:34.314 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:25:39 -0500 (0:00:00.043)       0:01:34.357 ******** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "type": "partition",
            "volumes": [
                {
                    "fs_type": "ext4",
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "type": "partition"
                }
            ]
        }
    ]
}
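
Annotation (sketch; the actual test playbook is not reproduced here, only the values visible in the storage_pools debug above): an invocation passing this pool specification to the role would look roughly like:

    - name: Create one partition on one disk   # mirrors the storage_pools value above
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: partition
            disks:
              - sda
            volumes:
              - name: test1
                type: partition
                fs_type: ext4
                mount_point: /opt/test1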

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:25:39 -0500 (0:00:00.045)       0:01:34.403 ******** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:25:39 -0500 (0:00:00.065)       0:01:34.468 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:25:39 -0500 (0:00:00.050)       0:01:34.519 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:25:39 -0500 (0:00:00.052)       0:01:34.571 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:25:39 -0500 (0:00:00.046)       0:01:34.620 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:25:39 -0500 (0:00:00.052)       0:01:34.673 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:25:39 -0500 (0:00:00.124)       0:01:34.798 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:25:39 -0500 (0:00:00.038)       0:01:34.836 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "disklabel"
        },
        {
            "action": "create device",
            "device": "/dev/sda1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/sda1",
            "fs_type": "ext4"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/sda1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "ext4",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "e2fsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "partition",
            "volumes": [
                {
                    "_device": "/dev/sda1",
                    "_kernel_device": "/dev/sda1",
                    "_mount_id": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
                    "_raw_device": "/dev/sda1",
                    "_raw_kernel_device": "/dev/sda1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "ext4",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 0,
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "partition",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:25:41 -0500 (0:00:01.706)       0:01:36.543 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:25:41 -0500 (0:00:00.065)       0:01:36.609 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372335.769906,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "85170829302ce3d8b3d8f3031aface16e161cfd6",
        "ctime": 1737372335.2179053,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372335.2179053,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:25:41 -0500 (0:00:00.415)       0:01:37.025 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:25:42 -0500 (0:00:00.439)       0:01:37.464 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:25:42 -0500 (0:00:00.039)       0:01:37.503 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "disklabel"
            },
            {
                "action": "create device",
                "device": "/dev/sda1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/sda1",
                "fs_type": "ext4"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/sda1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "ext4",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "e2fsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "partition",
                "volumes": [
                    {
                        "_device": "/dev/sda1",
                        "_kernel_device": "/dev/sda1",
                        "_mount_id": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
                        "_raw_device": "/dev/sda1",
                        "_raw_kernel_device": "/dev/sda1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "ext4",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 0,
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "partition",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}
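
Annotation (illustrative only, not part of the role or the test): the same module result is exposed to later tasks as blivet_output, so a follow-up check against the three actions reported above could be written roughly as:

    - name: Example check against the reported blivet actions   # hedged sketch
      ansible.builtin.assert:
        that:
          - blivet_output is changed
          - blivet_output.actions | length == 3
          - blivet_output.actions | selectattr('device', 'equalto', '/dev/sda1') | list | length == 2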

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:25:42 -0500 (0:00:00.050)       0:01:37.554 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "partition",
                "volumes": [
                    {
                        "_device": "/dev/sda1",
                        "_kernel_device": "/dev/sda1",
                        "_mount_id": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
                        "_raw_device": "/dev/sda1",
                        "_raw_kernel_device": "/dev/sda1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "ext4",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": 0,
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "partition",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:25:42 -0500 (0:00:00.048)       0:01:37.603 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:25:42 -0500 (0:00:00.046)       0:01:37.650 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:25:42 -0500 (0:00:00.102)       0:01:37.752 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:25:43 -0500 (0:00:00.761)       0:01:38.514 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=81f241aa-d706-49d3-803e-e3f4583bcb91', 'path': '/opt/test1', 'fstype': 'ext4', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext4",
    "mount_info": {
        "dump": 0,
        "fstype": "ext4",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91"
}
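
Annotation (sketch, not from the test playbook): as with the removal earlier, this is ansible.posix.mount, here with state: mounted and the UUID-based src taken from the blivet result:

    - name: Mount the new ext4 volume on /opt/test1   # illustrative sketch only
      ansible.posix.mount:
        src: UUID=81f241aa-d706-49d3-803e-e3f4583bcb91
        path: /opt/test1
        fstype: ext4
        opts: defaults
        state: mounted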

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:25:43 -0500 (0:00:00.472)       0:01:38.986 ******** 
skipping: [managed-node3] => (item={'src': 'UUID=81f241aa-d706-49d3-803e-e3f4583bcb91', 'path': '/opt/test1', 'fstype': 'ext4', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "ext4",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:25:43 -0500 (0:00:00.094)       0:01:39.080 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:25:44 -0500 (0:00:00.765)       0:01:39.846 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:25:44 -0500 (0:00:00.384)       0:01:40.231 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:25:44 -0500 (0:00:00.041)       0:01:40.272 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:158
Monday 20 January 2025  06:25:45 -0500 (0:00:00.943)       0:01:41.216 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Monday 20 January 2025  06:25:45 -0500 (0:00:00.069)       0:01:41.285 ******** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "partition",
            "volumes": [
                {
                    "_device": "/dev/sda1",
                    "_kernel_device": "/dev/sda1",
                    "_mount_id": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
                    "_raw_device": "/dev/sda1",
                    "_raw_kernel_device": "/dev/sda1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "ext4",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": 0,
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "partition",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Monday 20 January 2025  06:25:46 -0500 (0:00:00.060)       0:01:41.346 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Monday 20 January 2025  06:25:46 -0500 (0:00:00.044)       0:01:41.390 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sda1": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/sda1",
            "size": "10G",
            "type": "partition",
            "uuid": "81f241aa-d706-49d3-803e-e3f4583bcb91"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "67db83fb-317b-4d3f-873e-b15c4c94e41e"
        }
    }
}
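
Annotation (assumption-level sketch; the test collects this via its own helper): device information of this shape is what lsblk reports, so an ad-hoc task gathering roughly the same fields could look like:

    - name: Collect block device info   # illustrative sketch only
      ansible.builtin.command: lsblk -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID --json
      register: storage_test_lsblk    # hypothetical variable name
      changed_when: false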

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Monday 20 January 2025  06:25:46 -0500 (0:00:00.385)       0:01:41.775 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002963",
    "end": "2025-01-20 06:25:46.783460",
    "rc": 0,
    "start": "2025-01-20 06:25:46.780497"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:17 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=67db83fb-317b-4d3f-873e-b15c4c94e41e /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=81f241aa-d706-49d3-803e-e3f4583bcb91 /opt/test1 ext4 defaults 0 0
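
Annotation (illustrative; the real verification lives in verify-role-results.yml): the last line above is the entry the role just added, and a simple follow-up check that exactly one such entry exists could be sketched as:

    - name: Confirm the /opt/test1 entry made it into /etc/fstab   # sketch only
      ansible.builtin.command: grep -c ' /opt/test1 ext4 ' /etc/fstab
      register: storage_test_fstab_entry   # hypothetical variable name
      failed_when: storage_test_fstab_entry.stdout | int != 1
      changed_when: false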

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Monday 20 January 2025  06:25:46 -0500 (0:00:00.427)       0:01:42.203 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003064",
    "end": "2025-01-20 06:25:47.322261",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-20 06:25:47.319197"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Monday 20 January 2025  06:25:47 -0500 (0:00:00.533)       0:01:42.736 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'partition', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 0, 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/sda1', '_raw_device': '/dev/sda1', '_mount_id': 'UUID=81f241aa-d706-49d3-803e-e3f4583bcb91', '_kernel_device': '/dev/sda1', '_raw_kernel_device': '/dev/sda1'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Monday 20 January 2025  06:25:47 -0500 (0:00:00.194)       0:01:42.931 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Monday 20 January 2025  06:25:47 -0500 (0:00:00.071)       0:01:43.003 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Monday 20 January 2025  06:25:47 -0500 (0:00:00.082)       0:01:43.085 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Monday 20 January 2025  06:25:47 -0500 (0:00:00.064)       0:01:43.150 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Monday 20 January 2025  06:25:47 -0500 (0:00:00.153)       0:01:43.304 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Monday 20 January 2025  06:25:48 -0500 (0:00:00.058)       0:01:43.362 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Monday 20 January 2025  06:25:48 -0500 (0:00:00.035)       0:01:43.398 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Monday 20 January 2025  06:25:48 -0500 (0:00:00.062)       0:01:43.460 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Monday 20 January 2025  06:25:48 -0500 (0:00:00.076)       0:01:43.536 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Monday 20 January 2025  06:25:48 -0500 (0:00:00.054)       0:01:43.591 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Monday 20 January 2025  06:25:48 -0500 (0:00:00.064)       0:01:43.655 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Monday 20 January 2025  06:25:48 -0500 (0:00:00.156)       0:01:43.811 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Monday 20 January 2025  06:25:48 -0500 (0:00:00.067)       0:01:43.878 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Monday 20 January 2025  06:25:48 -0500 (0:00:00.056)       0:01:43.936 ******** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDOUT:


** (process:321709): WARNING **: 06:25:48.969: failed to load module nvme: libbd_nvme.so.2: cannot open shared object file: No such file or directory



STDERR:

OpenSSH_8.7p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.9.60 originally 10.31.9.60
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.9.60 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.9.60 originally 10.31.9.60
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.9.60 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Monday 20 January 2025  06:25:49 -0500 (0:00:00.528)       0:01:44.470 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Monday 20 January 2025  06:25:49 -0500 (0:00:00.123)       0:01:44.594 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Monday 20 January 2025  06:25:49 -0500 (0:00:00.162)       0:01:44.757 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Monday 20 January 2025  06:25:49 -0500 (0:00:00.050)       0:01:44.808 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Monday 20 January 2025  06:25:49 -0500 (0:00:00.068)       0:01:44.876 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Monday 20 January 2025  06:25:49 -0500 (0:00:00.060)       0:01:44.941 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Monday 20 January 2025  06:25:49 -0500 (0:00:00.055)       0:01:44.997 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Monday 20 January 2025  06:25:49 -0500 (0:00:00.041)       0:01:45.039 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Monday 20 January 2025  06:25:49 -0500 (0:00:00.036)       0:01:45.075 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Monday 20 January 2025  06:25:49 -0500 (0:00:00.043)       0:01:45.119 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Monday 20 January 2025  06:25:49 -0500 (0:00:00.054)       0:01:45.173 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Monday 20 January 2025  06:25:49 -0500 (0:00:00.050)       0:01:45.224 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Monday 20 January 2025  06:25:49 -0500 (0:00:00.050)       0:01:45.275 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Monday 20 January 2025  06:25:49 -0500 (0:00:00.045)       0:01:45.320 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Monday 20 January 2025  06:25:50 -0500 (0:00:00.090)       0:01:45.411 ******** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 0, 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/sda1', '_raw_device': '/dev/sda1', '_mount_id': 'UUID=81f241aa-d706-49d3-803e-e3f4583bcb91', '_kernel_device': '/dev/sda1', '_raw_kernel_device': '/dev/sda1'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/sda1",
        "_kernel_device": "/dev/sda1",
        "_mount_id": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
        "_raw_device": "/dev/sda1",
        "_raw_kernel_device": "/dev/sda1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "ext4",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": 0,
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "partition",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Monday 20 January 2025  06:25:50 -0500 (0:00:00.046)       0:01:45.457 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Monday 20 January 2025  06:25:50 -0500 (0:00:00.094)       0:01:45.552 ******** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 0, 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/sda1', '_raw_device': '/dev/sda1', '_mount_id': 'UUID=81f241aa-d706-49d3-803e-e3f4583bcb91', '_kernel_device': '/dev/sda1', '_raw_kernel_device': '/dev/sda1'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/sda1",
        "_kernel_device": "/dev/sda1",
        "_mount_id": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
        "_raw_device": "/dev/sda1",
        "_raw_kernel_device": "/dev/sda1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "ext4",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": 0,
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "partition",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Monday 20 January 2025  06:25:50 -0500 (0:00:00.046)       0:01:45.599 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Monday 20 January 2025  06:25:50 -0500 (0:00:00.074)       0:01:45.673 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Monday 20 January 2025  06:25:50 -0500 (0:00:00.102)       0:01:45.775 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Monday 20 January 2025  06:25:50 -0500 (0:00:00.060)       0:01:45.835 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Monday 20 January 2025  06:25:50 -0500 (0:00:00.056)       0:01:45.891 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Monday 20 January 2025  06:25:50 -0500 (0:00:00.048)       0:01:45.940 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Monday 20 January 2025  06:25:50 -0500 (0:00:00.076)       0:01:46.016 ******** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 0, 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/sda1', '_raw_device': '/dev/sda1', '_mount_id': 'UUID=81f241aa-d706-49d3-803e-e3f4583bcb91', '_kernel_device': '/dev/sda1', '_raw_kernel_device': '/dev/sda1'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/sda1",
        "_kernel_device": "/dev/sda1",
        "_mount_id": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
        "_raw_device": "/dev/sda1",
        "_raw_kernel_device": "/dev/sda1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "ext4",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": 0,
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "partition",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Monday 20 January 2025  06:25:50 -0500 (0:00:00.043)       0:01:46.060 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Monday 20 January 2025  06:25:50 -0500 (0:00:00.069)       0:01:46.130 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Monday 20 January 2025  06:25:50 -0500 (0:00:00.022)       0:01:46.152 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Monday 20 January 2025  06:25:50 -0500 (0:00:00.020)       0:01:46.173 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Monday 20 January 2025  06:25:50 -0500 (0:00:00.020)       0:01:46.194 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Monday 20 January 2025  06:25:50 -0500 (0:00:00.024)       0:01:46.218 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'stratis'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Monday 20 January 2025  06:25:50 -0500 (0:00:00.021)       0:01:46.239 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Monday 20 January 2025  06:25:50 -0500 (0:00:00.024)       0:01:46.264 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Monday 20 January 2025  06:25:50 -0500 (0:00:00.033)       0:01:46.298 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 0, 'state': 'present', 'type': 'partition', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/sda1', '_raw_device': '/dev/sda1', '_mount_id': 'UUID=81f241aa-d706-49d3-803e-e3f4583bcb91', '_kernel_device': '/dev/sda1', '_raw_kernel_device': '/dev/sda1'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Monday 20 January 2025  06:25:51 -0500 (0:00:00.081)       0:01:46.380 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Monday 20 January 2025  06:25:51 -0500 (0:00:00.124)       0:01:46.505 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
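
The eight includes above are produced by looping over the _storage_volume_tests list set in the previous task. A minimal sketch of such a dispatch loop, written as an assumption about how the include is wired rather than a copy of the test suite, is:

    # Illustrative dispatch loop; the actual task in test-verify-volume.yml
    # may differ in wording and options.
    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset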

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Monday 20 January 2025  06:25:51 -0500 (0:00:00.288)       0:01:46.793 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sda1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Monday 20 January 2025  06:25:51 -0500 (0:00:00.063)       0:01:46.856 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Monday 20 January 2025  06:25:51 -0500 (0:00:00.067)       0:01:46.923 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Monday 20 January 2025  06:25:51 -0500 (0:00:00.032)       0:01:46.956 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Monday 20 January 2025  06:25:51 -0500 (0:00:00.031)       0:01:46.988 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Monday 20 January 2025  06:25:51 -0500 (0:00:00.021)       0:01:47.010 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Monday 20 January 2025  06:25:51 -0500 (0:00:00.022)       0:01:47.032 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Monday 20 January 2025  06:25:51 -0500 (0:00:00.022)       0:01:47.055 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Monday 20 January 2025  06:25:51 -0500 (0:00:00.021)       0:01:47.077 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Monday 20 January 2025  06:25:51 -0500 (0:00:00.024)       0:01:47.101 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Monday 20 January 2025  06:25:51 -0500 (0:00:00.031)       0:01:47.133 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Monday 20 January 2025  06:25:51 -0500 (0:00:00.036)       0:01:47.170 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 ext4 defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
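
The match lists above come from searching the current /etc/fstab contents for the volume's mount id, mount options, and mount point; the expected counts are then compared in the assertions that follow. A rough sketch of how such facts can be assembled (the slurp variable name and the exact regex patterns are assumptions for illustration only):

    # Rough sketch: build fstab match lists from slurped file content.
    # "storage_test_fstab" is assumed to hold a prior ansible.builtin.slurp
    # of /etc/fstab; the patterns below are illustrative and unescaped.
    - name: Set some variables for fstab checking
      ansible.builtin.set_fact:
        storage_test_fstab_id_matches: >-
          {{ (storage_test_fstab.content | b64decode)
             | regex_findall(storage_test_volume._mount_id ~ ' ') }}
        storage_test_fstab_mount_point_matches: >-
          {{ (storage_test_fstab.content | b64decode)
             | regex_findall(' ' ~ storage_test_volume.mount_point ~ ' ') }}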

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Monday 20 January 2025  06:25:52 -0500 (0:00:00.255)       0:01:47.425 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Monday 20 January 2025  06:25:52 -0500 (0:00:00.077)       0:01:47.502 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Monday 20 January 2025  06:25:52 -0500 (0:00:00.074)       0:01:47.577 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Monday 20 January 2025  06:25:52 -0500 (0:00:00.066)       0:01:47.643 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Monday 20 January 2025  06:25:52 -0500 (0:00:00.074)       0:01:47.718 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Monday 20 January 2025  06:25:52 -0500 (0:00:00.037)       0:01:47.756 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Monday 20 January 2025  06:25:52 -0500 (0:00:00.094)       0:01:47.850 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Monday 20 January 2025  06:25:52 -0500 (0:00:00.095)       0:01:47.945 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372341.080912,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737372341.080912,
        "dev": 5,
        "device_type": 2049,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 9986,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737372341.080912,
        "nlink": 1,
        "path": "/dev/sda1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Monday 20 January 2025  06:25:53 -0500 (0:00:00.434)       0:01:48.380 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
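
The two tasks above follow a stat-then-assert pattern: stat the expected device node, then assert on the returned attributes. A condensed sketch of that pattern (the register name and the assertion text are assumptions; the real test file may check more):

    # Condensed, illustrative stat-then-assert check for the device node.
    - name: See whether the device node is present
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"
      register: storage_test_dev

    - name: Verify the presence/absence of the device node
      ansible.builtin.assert:
        that:
          - storage_test_dev.stat.exists and storage_test_dev.stat.isblk
        msg: "Unexpected device node state for {{ storage_test_volume._device }}"
      when: _storage_test_volume_present | bool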

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Monday 20 January 2025  06:25:53 -0500 (0:00:00.085)       0:01:48.466 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Monday 20 January 2025  06:25:53 -0500 (0:00:00.053)       0:01:48.519 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Monday 20 January 2025  06:25:53 -0500 (0:00:00.077)       0:01:48.597 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "partition"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Monday 20 January 2025  06:25:53 -0500 (0:00:00.051)       0:01:48.649 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Monday 20 January 2025  06:25:53 -0500 (0:00:00.044)       0:01:48.694 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Monday 20 January 2025  06:25:53 -0500 (0:00:00.066)       0:01:48.760 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Monday 20 January 2025  06:25:53 -0500 (0:00:00.048)       0:01:48.809 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Monday 20 January 2025  06:25:54 -0500 (0:00:01.517)       0:01:50.327 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Monday 20 January 2025  06:25:55 -0500 (0:00:00.055)       0:01:50.382 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Monday 20 January 2025  06:25:55 -0500 (0:00:00.072)       0:01:50.455 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Monday 20 January 2025  06:25:55 -0500 (0:00:00.111)       0:01:50.567 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Monday 20 January 2025  06:25:55 -0500 (0:00:00.036)       0:01:50.604 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Monday 20 January 2025  06:25:55 -0500 (0:00:00.060)       0:01:50.665 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Monday 20 January 2025  06:25:55 -0500 (0:00:00.049)       0:01:50.714 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Monday 20 January 2025  06:25:55 -0500 (0:00:00.047)       0:01:50.762 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Monday 20 January 2025  06:25:55 -0500 (0:00:00.062)       0:01:50.825 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Monday 20 January 2025  06:25:55 -0500 (0:00:00.137)       0:01:50.962 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Monday 20 January 2025  06:25:55 -0500 (0:00:00.142)       0:01:51.104 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Monday 20 January 2025  06:25:55 -0500 (0:00:00.088)       0:01:51.192 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Monday 20 January 2025  06:25:55 -0500 (0:00:00.085)       0:01:51.278 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Monday 20 January 2025  06:25:56 -0500 (0:00:00.102)       0:01:51.380 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Monday 20 January 2025  06:25:56 -0500 (0:00:00.053)       0:01:51.434 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Monday 20 January 2025  06:25:56 -0500 (0:00:00.038)       0:01:51.473 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Monday 20 January 2025  06:25:56 -0500 (0:00:00.034)       0:01:51.507 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Monday 20 January 2025  06:25:56 -0500 (0:00:00.056)       0:01:51.563 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Monday 20 January 2025  06:25:56 -0500 (0:00:00.039)       0:01:51.603 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Monday 20 January 2025  06:25:56 -0500 (0:00:00.059)       0:01:51.662 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Monday 20 January 2025  06:25:56 -0500 (0:00:00.045)       0:01:51.708 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Monday 20 January 2025  06:25:56 -0500 (0:00:00.041)       0:01:51.749 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Monday 20 January 2025  06:25:56 -0500 (0:00:00.065)       0:01:51.814 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Monday 20 January 2025  06:25:56 -0500 (0:00:00.068)       0:01:51.883 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Monday 20 January 2025  06:25:56 -0500 (0:00:00.070)       0:01:51.954 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Monday 20 January 2025  06:25:56 -0500 (0:00:00.144)       0:01:52.098 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Monday 20 January 2025  06:25:56 -0500 (0:00:00.129)       0:01:52.227 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Monday 20 January 2025  06:25:56 -0500 (0:00:00.073)       0:01:52.301 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Monday 20 January 2025  06:25:57 -0500 (0:00:00.044)       0:01:52.345 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Monday 20 January 2025  06:25:57 -0500 (0:00:00.075)       0:01:52.420 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Monday 20 January 2025  06:25:57 -0500 (0:00:00.064)       0:01:52.485 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Monday 20 January 2025  06:25:57 -0500 (0:00:00.068)       0:01:52.553 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Monday 20 January 2025  06:25:57 -0500 (0:00:00.073)       0:01:52.626 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Monday 20 January 2025  06:25:57 -0500 (0:00:00.065)       0:01:52.692 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Monday 20 January 2025  06:25:57 -0500 (0:00:00.034)       0:01:52.726 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Monday 20 January 2025  06:25:57 -0500 (0:00:00.033)       0:01:52.759 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Monday 20 January 2025  06:25:57 -0500 (0:00:00.032)       0:01:52.792 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Monday 20 January 2025  06:25:57 -0500 (0:00:00.034)       0:01:52.827 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Monday 20 January 2025  06:25:57 -0500 (0:00:00.035)       0:01:52.863 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Monday 20 January 2025  06:25:57 -0500 (0:00:00.036)       0:01:52.899 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Monday 20 January 2025  06:25:57 -0500 (0:00:00.035)       0:01:52.934 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Monday 20 January 2025  06:25:57 -0500 (0:00:00.035)       0:01:52.969 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Monday 20 January 2025  06:25:57 -0500 (0:00:00.034)       0:01:53.004 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Monday 20 January 2025  06:25:57 -0500 (0:00:00.031)       0:01:53.035 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Monday 20 January 2025  06:25:57 -0500 (0:00:00.039)       0:01:53.075 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Monday 20 January 2025  06:25:57 -0500 (0:00:00.036)       0:01:53.111 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Monday 20 January 2025  06:25:57 -0500 (0:00:00.037)       0:01:53.148 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Monday 20 January 2025  06:25:57 -0500 (0:00:00.041)       0:01:53.190 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Monday 20 January 2025  06:25:57 -0500 (0:00:00.049)       0:01:53.239 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Monday 20 January 2025  06:25:57 -0500 (0:00:00.042)       0:01:53.282 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Monday 20 January 2025  06:25:57 -0500 (0:00:00.042)       0:01:53.324 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Monday 20 January 2025  06:25:58 -0500 (0:00:00.224)       0:01:53.549 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Monday 20 January 2025  06:25:58 -0500 (0:00:00.037)       0:01:53.586 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Monday 20 January 2025  06:25:58 -0500 (0:00:00.039)       0:01:53.626 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Monday 20 January 2025  06:25:58 -0500 (0:00:00.033)       0:01:53.659 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Monday 20 January 2025  06:25:58 -0500 (0:00:00.037)       0:01:53.697 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Monday 20 January 2025  06:25:58 -0500 (0:00:00.033)       0:01:53.730 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Monday 20 January 2025  06:25:58 -0500 (0:00:00.041)       0:01:53.771 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Monday 20 January 2025  06:25:58 -0500 (0:00:00.033)       0:01:53.805 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Monday 20 January 2025  06:25:58 -0500 (0:00:00.030)       0:01:53.835 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Monday 20 January 2025  06:25:58 -0500 (0:00:00.020)       0:01:53.856 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Test setting up disk volume will remove the partition created above] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:161
Monday 20 January 2025  06:25:58 -0500 (0:00:00.031)       0:01:53.887 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:25:58 -0500 (0:00:00.100)       0:01:53.988 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:25:58 -0500 (0:00:00.033)       0:01:54.022 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:25:58 -0500 (0:00:00.051)       0:01:54.073 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:25:58 -0500 (0:00:00.057)       0:01:54.130 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:25:58 -0500 (0:00:00.027)       0:01:54.158 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:25:58 -0500 (0:00:00.027)       0:01:54.186 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:25:58 -0500 (0:00:00.024)       0:01:54.210 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:25:58 -0500 (0:00:00.024)       0:01:54.235 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:25:58 -0500 (0:00:00.068)       0:01:54.303 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:25:59 -0500 (0:00:00.028)       0:01:54.331 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:25:59 -0500 (0:00:00.026)       0:01:54.358 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "fs_create_options": "-F",
            "fs_type": "ext4",
            "mount_options": "rw,noatime,defaults",
            "mount_point": "/opt/test1",
            "name": "foo",
            "type": "disk"
        }
    ]
}
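
The storage_volumes structure above is the role's input for this test case: a whole-disk ("type: disk") ext4 volume on sda, force-formatted and mounted at /opt/test1. A minimal sketch of a role invocation that would produce this input (values taken from the log output above; the task layout is an assumption, not a copy of tests_misc.yml):

    - name: Set up a disk volume on sda (sketch)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_volumes:
          - name: foo
            type: disk
            disks:
              - sda
            fs_type: ext4
            fs_create_options: "-F"
            mount_point: /opt/test1
            mount_options: rw,noatime,defaults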

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:25:59 -0500 (0:00:00.029)       0:01:54.388 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:25:59 -0500 (0:00:00.028)       0:01:54.416 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:25:59 -0500 (0:00:00.028)       0:01:54.444 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:25:59 -0500 (0:00:00.027)       0:01:54.472 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:25:59 -0500 (0:00:00.027)       0:01:54.499 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:25:59 -0500 (0:00:00.062)       0:01:54.562 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:25:59 -0500 (0:00:00.023)       0:01:54.585 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/sda1",
            "fs_type": "ext4"
        },
        {
            "action": "destroy device",
            "device": "/dev/sda1",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "disklabel"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "ext4"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "fstype": "ext4",
            "path": "/opt/test1",
            "src": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
            "state": "absent"
        },
        {
            "dump": 0,
            "fstype": "ext4",
            "group": null,
            "mode": null,
            "opts": "rw,noatime,defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
            "state": "mounted"
        }
    ],
    "packages": [
        "xfsprogs",
        "e2fsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "-F",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext4",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "rw,noatime,defaults",
            "mount_passno": 0,
            "mount_point": "/opt/test1",
            "mount_user": null,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}
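
The four blivet actions above amount to wiping out the partition created earlier in the test (destroy the ext4 format on /dev/sda1, delete the partition, drop the disklabel) and then formatting the bare disk as ext4. A rough manual equivalent, shown only for orientation (an assumption for illustration; the role performs all of this through the blivet module, not through these modules):

    - name: Remove the leftover partition (rough equivalent, not what the role runs)
      community.general.parted:
        device: /dev/sda
        number: 1
        state: absent

    - name: Create ext4 directly on the whole disk (rough equivalent)
      community.general.filesystem:
        dev: /dev/sda
        fstype: ext4
        force: true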

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:26:00 -0500 (0:00:01.607)       0:01:56.193 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:26:00 -0500 (0:00:00.064)       0:01:56.257 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372343.574915,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "67f29dddd13c0ca4ecb83eb372d1445c1280612d",
        "ctime": 1737372343.571915,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372343.571915,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1437,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:26:01 -0500 (0:00:00.416)       0:01:56.674 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:26:01 -0500 (0:00:00.382)       0:01:57.056 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:26:01 -0500 (0:00:00.022)       0:01:57.079 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/sda1",
                "fs_type": "ext4"
            },
            {
                "action": "destroy device",
                "device": "/dev/sda1",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "disklabel"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "ext4"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "fstype": "ext4",
                "path": "/opt/test1",
                "src": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
                "state": "absent"
            },
            {
                "dump": 0,
                "fstype": "ext4",
                "group": null,
                "mode": null,
                "opts": "rw,noatime,defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
                "state": "mounted"
            }
        ],
        "packages": [
            "xfsprogs",
            "e2fsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "-F",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext4",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "rw,noatime,defaults",
                "mount_passno": 0,
                "mount_point": "/opt/test1",
                "mount_user": null,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:26:01 -0500 (0:00:00.040)       0:01:57.120 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:26:01 -0500 (0:00:00.030)       0:01:57.151 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sda",
                "_kernel_device": "/dev/sda",
                "_mount_id": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
                "_raw_device": "/dev/sda",
                "_raw_kernel_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "-F",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext4",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "rw,noatime,defaults",
                "mount_passno": 0,
                "mount_point": "/opt/test1",
                "mount_user": null,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "present",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:26:01 -0500 (0:00:00.028)       0:01:57.179 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=81f241aa-d706-49d3-803e-e3f4583bcb91', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'ext4'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext4",
    "mount_info": {
        "fstype": "ext4",
        "path": "/opt/test1",
        "src": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=81f241aa-d706-49d3-803e-e3f4583bcb91"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:26:02 -0500 (0:00:00.409)       0:01:57.589 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
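
This systemd refresh corresponds to the daemon-reload that /etc/fstab's own comment recommends after edits, so that the generated .mount units match the new file contents. A typical task of this shape (a sketch, not the role's exact task):

    - name: Tell systemd to pick up the edited /etc/fstab (sketch)
      ansible.builtin.systemd:
        daemon_reload: true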

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:26:02 -0500 (0:00:00.690)       0:01:58.279 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=c42ba515-6947-46a6-9b95-281e4da487b4', 'path': '/opt/test1', 'fstype': 'ext4', 'opts': 'rw,noatime,defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext4",
    "mount_info": {
        "dump": 0,
        "fstype": "ext4",
        "group": null,
        "mode": null,
        "opts": "rw,noatime,defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "rw,noatime,defaults",
    "passno": "0",
    "src": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4"
}
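
Each entry in the blivet "mounts" list is applied with ansible.posix.mount (note the module redirect lines above), looping over the mount entries shown in blivet_output. For the single mount in this run, the call is roughly equivalent to the following sketch, with parameter values copied from the mount_info shown:

    - name: Mount the new ext4 filesystem (sketch of one loop iteration)
      ansible.posix.mount:
        src: UUID=c42ba515-6947-46a6-9b95-281e4da487b4
        path: /opt/test1
        fstype: ext4
        opts: rw,noatime,defaults
        state: mounted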

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:26:03 -0500 (0:00:00.416)       0:01:58.696 ******** 
skipping: [managed-node3] => (item={'src': 'UUID=c42ba515-6947-46a6-9b95-281e4da487b4', 'path': '/opt/test1', 'fstype': 'ext4', 'opts': 'rw,noatime,defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "ext4",
        "group": null,
        "mode": null,
        "opts": "rw,noatime,defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:26:03 -0500 (0:00:00.077)       0:01:58.773 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:26:04 -0500 (0:00:00.716)       0:01:59.490 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:26:04 -0500 (0:00:00.394)       0:01:59.884 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:26:04 -0500 (0:00:00.020)       0:01:59.905 ******** 
ok: [managed-node3]

TASK [Verify results] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:174
Monday 20 January 2025  06:26:05 -0500 (0:00:00.929)       0:02:00.834 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Monday 20 January 2025  06:26:05 -0500 (0:00:00.103)       0:02:00.938 ******** 
skipping: [managed-node3] => {
    "false_condition": "_storage_pools_list | length > 0"
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Monday 20 January 2025  06:26:05 -0500 (0:00:00.054)       0:02:00.992 ******** 
ok: [managed-node3] => {
    "_storage_volumes_list": [
        {
            "_device": "/dev/sda",
            "_kernel_device": "/dev/sda",
            "_mount_id": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
            "_raw_device": "/dev/sda",
            "_raw_kernel_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "-F",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext4",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "rw,noatime,defaults",
            "mount_passno": 0,
            "mount_point": "/opt/test1",
            "mount_user": null,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "present",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Monday 20 January 2025  06:26:05 -0500 (0:00:00.074)       0:02:01.067 ******** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "c42ba515-6947-46a6-9b95-281e4da487b4"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda1",
            "size": "250G",
            "type": "partition",
            "uuid": "67db83fb-317b-4d3f-873e-b15c4c94e41e"
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Monday 20 January 2025  06:26:06 -0500 (0:00:00.384)       0:02:01.451 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.002780",
    "end": "2025-01-20 06:26:06.478951",
    "rc": 0,
    "start": "2025-01-20 06:26:06.476171"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Fri Dec 20 07:21:17 2024
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=67db83fb-317b-4d3f-873e-b15c4c94e41e /                       xfs     defaults        0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=c42ba515-6947-46a6-9b95-281e4da487b4 /opt/test1 ext4 rw,noatime,defaults 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Monday 20 January 2025  06:26:06 -0500 (0:00:00.411)       0:02:01.863 ******** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.002781",
    "end": "2025-01-20 06:26:06.934979",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-01-20 06:26:06.932198"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Monday 20 January 2025  06:26:07 -0500 (0:00:00.478)       0:02:02.341 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Monday 20 January 2025  06:26:07 -0500 (0:00:00.031)       0:02:02.373 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '-F', 'fs_label': '', 'fs_type': 'ext4', 'mount_options': 'rw,noatime,defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'foo', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': [], '_device': '/dev/sda', '_raw_device': '/dev/sda', '_mount_id': 'UUID=c42ba515-6947-46a6-9b95-281e4da487b4', '_kernel_device': '/dev/sda', '_raw_kernel_device': '/dev/sda'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Monday 20 January 2025  06:26:07 -0500 (0:00:00.251)       0:02:02.625 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Monday 20 January 2025  06:26:07 -0500 (0:00:00.060)       0:02:02.685 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
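
The eight includes above are driven by the _storage_volume_tests list set two tasks earlier; the "{{ storage_test_volume_subset }}" in the task title indicates the loop variable name. A sketch of what the dispatching task plausibly looks like (inferred from this output, not copied from test-verify-volume.yml):

    - name: Run test verify for each subset (sketch)
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset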

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Monday 20 January 2025  06:26:07 -0500 (0:00:00.235)       0:02:02.920 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/sda"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Monday 20 January 2025  06:26:07 -0500 (0:00:00.051)       0:02:02.972 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Monday 20 January 2025  06:26:07 -0500 (0:00:00.107)       0:02:03.080 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Monday 20 January 2025  06:26:07 -0500 (0:00:00.053)       0:02:03.134 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Monday 20 January 2025  06:26:07 -0500 (0:00:00.060)       0:02:03.194 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Monday 20 January 2025  06:26:07 -0500 (0:00:00.056)       0:02:03.251 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Monday 20 January 2025  06:26:07 -0500 (0:00:00.045)       0:02:03.296 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Monday 20 January 2025  06:26:08 -0500 (0:00:00.042)       0:02:03.338 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Monday 20 January 2025  06:26:08 -0500 (0:00:00.037)       0:02:03.376 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Monday 20 January 2025  06:26:08 -0500 (0:00:00.033)       0:02:03.409 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Monday 20 January 2025  06:26:08 -0500 (0:00:00.059)       0:02:03.469 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Monday 20 January 2025  06:26:08 -0500 (0:00:00.077)       0:02:03.546 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=c42ba515-6947-46a6-9b95-281e4da487b4 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 ext4 rw,noatime,defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
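
The three *_matches lists above are fragments of the /etc/fstab line for the test volume: one match for the UUID-based device identifier, one for the mount point, and one for the mount options, each compared against an expected count of 1 in the tasks that follow. One plausible way to build such lists (an assumption about the implementation, not a copy of test-verify-volume-fstab.yml) is a regex scan of the fstab content captured earlier, assuming it was registered as storage_test_fstab:

    - name: Collect fstab matches for the test volume (hedged sketch)
      ansible.builtin.set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall(storage_test_volume._mount_id + ' ') }}"
        storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout | regex_findall(' ' + storage_test_volume.mount_point + ' ') }}"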

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Monday 20 January 2025  06:26:08 -0500 (0:00:00.189)       0:02:03.735 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Monday 20 January 2025  06:26:08 -0500 (0:00:00.099)       0:02:03.835 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Monday 20 January 2025  06:26:08 -0500 (0:00:00.095)       0:02:03.930 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Monday 20 January 2025  06:26:08 -0500 (0:00:00.086)       0:02:04.016 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Monday 20 January 2025  06:26:08 -0500 (0:00:00.101)       0:02:04.118 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Monday 20 January 2025  06:26:08 -0500 (0:00:00.048)       0:02:04.167 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Monday 20 January 2025  06:26:08 -0500 (0:00:00.098)       0:02:04.266 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Monday 20 January 2025  06:26:09 -0500 (0:00:00.093)       0:02:04.360 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372360.7529345,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1737372360.7529345,
        "dev": 5,
        "device_type": 2048,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 448,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/blockdevice",
        "mode": "0660",
        "mtime": 1737372360.7529345,
        "nlink": 1,
        "path": "/dev/sda",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Monday 20 January 2025  06:26:09 -0500 (0:00:00.455)       0:02:04.816 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Monday 20 January 2025  06:26:09 -0500 (0:00:00.087)       0:02:04.904 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Monday 20 January 2025  06:26:09 -0500 (0:00:00.077)       0:02:04.981 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Monday 20 January 2025  06:26:09 -0500 (0:00:00.066)       0:02:05.048 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "disk"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Monday 20 January 2025  06:26:09 -0500 (0:00:00.051)       0:02:05.100 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Monday 20 January 2025  06:26:09 -0500 (0:00:00.074)       0:02:05.175 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Monday 20 January 2025  06:26:09 -0500 (0:00:00.056)       0:02:05.232 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Monday 20 January 2025  06:26:09 -0500 (0:00:00.039)       0:02:05.271 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Monday 20 January 2025  06:26:11 -0500 (0:00:01.525)       0:02:06.796 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Monday 20 January 2025  06:26:11 -0500 (0:00:00.084)       0:02:06.881 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Monday 20 January 2025  06:26:11 -0500 (0:00:00.081)       0:02:06.962 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Monday 20 January 2025  06:26:11 -0500 (0:00:00.082)       0:02:07.044 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Monday 20 January 2025  06:26:11 -0500 (0:00:00.059)       0:02:07.104 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Monday 20 January 2025  06:26:11 -0500 (0:00:00.039)       0:02:07.143 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Monday 20 January 2025  06:26:11 -0500 (0:00:00.055)       0:02:07.199 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Monday 20 January 2025  06:26:11 -0500 (0:00:00.047)       0:02:07.247 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Monday 20 January 2025  06:26:11 -0500 (0:00:00.081)       0:02:07.329 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Monday 20 January 2025  06:26:12 -0500 (0:00:00.132)       0:02:07.462 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Monday 20 January 2025  06:26:12 -0500 (0:00:00.110)       0:02:07.572 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Monday 20 January 2025  06:26:12 -0500 (0:00:00.095)       0:02:07.668 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Monday 20 January 2025  06:26:12 -0500 (0:00:00.090)       0:02:07.759 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Monday 20 January 2025  06:26:12 -0500 (0:00:00.094)       0:02:07.853 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Monday 20 January 2025  06:26:12 -0500 (0:00:00.045)       0:02:07.898 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Monday 20 January 2025  06:26:12 -0500 (0:00:00.038)       0:02:07.937 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Monday 20 January 2025  06:26:12 -0500 (0:00:00.047)       0:02:07.985 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Monday 20 January 2025  06:26:12 -0500 (0:00:00.055)       0:02:08.040 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Monday 20 January 2025  06:26:12 -0500 (0:00:00.034)       0:02:08.075 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Monday 20 January 2025  06:26:12 -0500 (0:00:00.044)       0:02:08.120 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Monday 20 January 2025  06:26:12 -0500 (0:00:00.042)       0:02:08.163 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Monday 20 January 2025  06:26:12 -0500 (0:00:00.055)       0:02:08.218 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Monday 20 January 2025  06:26:12 -0500 (0:00:00.038)       0:02:08.257 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Monday 20 January 2025  06:26:12 -0500 (0:00:00.045)       0:02:08.302 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Monday 20 January 2025  06:26:13 -0500 (0:00:00.035)       0:02:08.338 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Monday 20 January 2025  06:26:13 -0500 (0:00:00.078)       0:02:08.416 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Monday 20 January 2025  06:26:13 -0500 (0:00:00.074)       0:02:08.491 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Monday 20 January 2025  06:26:13 -0500 (0:00:00.076)       0:02:08.567 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Monday 20 January 2025  06:26:13 -0500 (0:00:00.046)       0:02:08.613 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Monday 20 January 2025  06:26:13 -0500 (0:00:00.089)       0:02:08.703 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Monday 20 January 2025  06:26:13 -0500 (0:00:00.070)       0:02:08.774 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Monday 20 January 2025  06:26:13 -0500 (0:00:00.075)       0:02:08.849 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Monday 20 January 2025  06:26:13 -0500 (0:00:00.066)       0:02:08.916 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Monday 20 January 2025  06:26:13 -0500 (0:00:00.066)       0:02:08.982 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Monday 20 January 2025  06:26:13 -0500 (0:00:00.034)       0:02:09.017 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Monday 20 January 2025  06:26:13 -0500 (0:00:00.039)       0:02:09.057 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Monday 20 January 2025  06:26:13 -0500 (0:00:00.046)       0:02:09.103 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Monday 20 January 2025  06:26:13 -0500 (0:00:00.039)       0:02:09.143 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Monday 20 January 2025  06:26:13 -0500 (0:00:00.038)       0:02:09.182 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Monday 20 January 2025  06:26:13 -0500 (0:00:00.035)       0:02:09.217 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Monday 20 January 2025  06:26:13 -0500 (0:00:00.031)       0:02:09.249 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Monday 20 January 2025  06:26:13 -0500 (0:00:00.037)       0:02:09.286 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Monday 20 January 2025  06:26:13 -0500 (0:00:00.034)       0:02:09.321 ******** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Monday 20 January 2025  06:26:14 -0500 (0:00:00.120)       0:02:09.442 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Monday 20 January 2025  06:26:14 -0500 (0:00:00.036)       0:02:09.478 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Monday 20 January 2025  06:26:14 -0500 (0:00:00.035)       0:02:09.514 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Monday 20 January 2025  06:26:14 -0500 (0:00:00.038)       0:02:09.553 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Monday 20 January 2025  06:26:14 -0500 (0:00:00.037)       0:02:09.591 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Monday 20 January 2025  06:26:14 -0500 (0:00:00.035)       0:02:09.626 ******** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "storage_test_volume.type not in ['partition', 'disk']",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Monday 20 January 2025  06:26:14 -0500 (0:00:00.049)       0:02:09.675 ******** 
ok: [managed-node3] => {
    "storage_test_expected_size": "4294967296"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Monday 20 January 2025  06:26:14 -0500 (0:00:00.042)       0:02:09.718 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Monday 20 January 2025  06:26:14 -0500 (0:00:00.068)       0:02:09.786 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Monday 20 January 2025  06:26:14 -0500 (0:00:00.045)       0:02:09.832 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Monday 20 January 2025  06:26:14 -0500 (0:00:00.046)       0:02:09.879 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Monday 20 January 2025  06:26:14 -0500 (0:00:00.059)       0:02:09.939 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Monday 20 January 2025  06:26:14 -0500 (0:00:00.041)       0:02:09.980 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Monday 20 January 2025  06:26:14 -0500 (0:00:00.058)       0:02:10.038 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Monday 20 January 2025  06:26:14 -0500 (0:00:00.061)       0:02:10.100 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Monday 20 January 2025  06:26:14 -0500 (0:00:00.066)       0:02:10.166 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Monday 20 January 2025  06:26:14 -0500 (0:00:00.040)       0:02:10.207 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Remove the disk volume created above] ************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:179
Monday 20 January 2025  06:26:14 -0500 (0:00:00.051)       0:02:10.258 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:26:15 -0500 (0:00:00.219)       0:02:10.478 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:26:15 -0500 (0:00:00.114)       0:02:10.592 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:26:15 -0500 (0:00:00.107)       0:02:10.700 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:26:15 -0500 (0:00:00.173)       0:02:10.874 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:26:15 -0500 (0:00:00.094)       0:02:10.969 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:26:15 -0500 (0:00:00.090)       0:02:11.060 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:26:15 -0500 (0:00:00.063)       0:02:11.123 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:26:15 -0500 (0:00:00.078)       0:02:11.202 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:26:16 -0500 (0:00:00.144)       0:02:11.346 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:26:16 -0500 (0:00:00.064)       0:02:11.410 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:26:16 -0500 (0:00:00.071)       0:02:11.482 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "name": "foo",
            "state": "absent",
            "type": "disk"
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:26:16 -0500 (0:00:00.051)       0:02:11.534 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:26:16 -0500 (0:00:00.067)       0:02:11.602 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:26:16 -0500 (0:00:00.046)       0:02:11.649 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:26:16 -0500 (0:00:00.062)       0:02:11.711 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:26:16 -0500 (0:00:00.051)       0:02:11.763 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:26:16 -0500 (0:00:00.118)       0:02:11.881 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:26:16 -0500 (0:00:00.034)       0:02:11.916 ******** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "ext4"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [
        {
            "fstype": "ext4",
            "path": "/opt/test1",
            "src": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
            "state": "absent"
        }
    ],
    "packages": [
        "xfsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sda",
            "_mount_id": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
            "_raw_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "ext4",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:26:17 -0500 (0:00:01.225)       0:02:13.142 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:26:17 -0500 (0:00:00.097)       0:02:13.239 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372363.2959373,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "3a263cde13389823e5acf02c215ef10dae9a7727",
        "ctime": 1737372363.2929373,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372363.2929373,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1448,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:26:18 -0500 (0:00:00.495)       0:02:13.735 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:26:18 -0500 (0:00:00.498)       0:02:14.233 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:26:18 -0500 (0:00:00.052)       0:02:14.285 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "ext4"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [
            {
                "fstype": "ext4",
                "path": "/opt/test1",
                "src": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
                "state": "absent"
            }
        ],
        "packages": [
            "xfsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sda",
                "_mount_id": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext4",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:26:19 -0500 (0:00:00.062)       0:02:14.348 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:26:19 -0500 (0:00:00.052)       0:02:14.400 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sda",
                "_mount_id": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "ext4",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:26:19 -0500 (0:00:00.058)       0:02:14.459 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=c42ba515-6947-46a6-9b95-281e4da487b4', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'ext4'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "ext4",
    "mount_info": {
        "fstype": "ext4",
        "path": "/opt/test1",
        "src": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=c42ba515-6947-46a6-9b95-281e4da487b4"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:26:19 -0500 (0:00:00.513)       0:02:14.973 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:26:20 -0500 (0:00:00.872)       0:02:15.845 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:26:20 -0500 (0:00:00.154)       0:02:15.999 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:26:20 -0500 (0:00:00.129)       0:02:16.129 ******** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:26:21 -0500 (0:00:00.762)       0:02:16.891 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:26:21 -0500 (0:00:00.382)       0:02:17.274 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:26:21 -0500 (0:00:00.034)       0:02:17.308 ******** 
ok: [managed-node3]

TASK [Get unused disks for swap] ***********************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:190
Monday 20 January 2025  06:26:22 -0500 (0:00:00.910)       0:02:18.219 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node3

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
Monday 20 January 2025  06:26:22 -0500 (0:00:00.083)       0:02:18.302 ******** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do

TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
Monday 20 January 2025  06:26:24 -0500 (0:00:01.528)       0:02:19.830 ******** 
ok: [managed-node3] => {
    "changed": false,
    "disks": [
        "sda"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG_SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG_SEC=\"512\"",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions"
    ]
}
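
Annotation: the "info" lines above look like lsblk key="value" pairs filtered down to whole, unformatted disks. A rough equivalent for inspecting the same data by hand (an illustrative sketch, not the test's find_unused_disk module) would be:

# Hedged sketch: dump the same NAME/TYPE/SIZE/FSTYPE/LOG-SEC pairs that the
# log lines above appear to be parsed from. Register name is illustrative.
- name: List block devices the way the log lines suggest
  ansible.builtin.command:
    cmd: lsblk -p --pairs --bytes -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
  register: lsblk_out
  changed_when: false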

TASK [Debug why there are no unused disks] *************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20
Monday 20 January 2025  06:26:25 -0500 (0:00:00.507)       0:02:20.338 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'Unable to find unused disk' in unused_disks_return.disks",
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29
Monday 20 January 2025  06:26:25 -0500 (0:00:00.039)       0:02:20.377 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "unused_disks": [
            "sda"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there's not enough unused disks in the system] ********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34
Monday 20 January 2025  06:26:25 -0500 (0:00:00.045)       0:02:20.423 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)",
    "skip_reason": "Conditional result was False"
}

TASK [Print unused disks] ******************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39
Monday 20 January 2025  06:26:25 -0500 (0:00:00.076)       0:02:20.499 ******** 
ok: [managed-node3] => {
    "unused_disks": [
        "sda"
    ]
}

TASK [Save disk used for swap] *************************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:198
Monday 20 January 2025  06:26:25 -0500 (0:00:00.042)       0:02:20.542 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__swap_disk": "sda"
    },
    "changed": false
}

TASK [Test for correct handling of mounting a non-mountable formatting type] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:202
Monday 20 January 2025  06:26:25 -0500 (0:00:00.049)       0:02:20.592 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml for managed-node3

TASK [Store global variable value copy] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:4
Monday 20 January 2025  06:26:25 -0500 (0:00:00.123)       0:02:20.715 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_pools_global": [],
        "storage_safe_mode_global": false,
        "storage_volumes_global": []
    },
    "changed": false
}

TASK [Verify role raises correct error] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:10
Monday 20 January 2025  06:26:25 -0500 (0:00:00.116)       0:02:20.831 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:26:25 -0500 (0:00:00.063)       0:02:20.894 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:26:25 -0500 (0:00:00.051)       0:02:20.946 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:26:25 -0500 (0:00:00.066)       0:02:21.012 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:26:25 -0500 (0:00:00.063)       0:02:21.076 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:26:25 -0500 (0:00:00.030)       0:02:21.106 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:26:25 -0500 (0:00:00.027)       0:02:21.134 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:26:25 -0500 (0:00:00.024)       0:02:21.158 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:26:25 -0500 (0:00:00.025)       0:02:21.184 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:26:25 -0500 (0:00:00.082)       0:02:21.267 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:26:25 -0500 (0:00:00.063)       0:02:21.330 ******** 
ok: [managed-node3] => {
    "storage_pools": []
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:26:26 -0500 (0:00:00.068)       0:02:21.398 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "fs_type": "swap",
            "mount_point": "/opt/test1",
            "name": "test1",
            "type": "disk"
        }
    ]
}
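
Annotation: the storage_volumes value above is the negative case under test: a swap-formatted disk given a mount_point, which the role must reject because swap is not a mountable file system. A hedged reconstruction of the invocation, based only on the variables printed in this log (not copied from tests_misc.yml), would be:

# Hedged sketch of the failing invocation; unused_disks comes from the
# "Get unused disks for swap" step earlier in this run.
- name: Try to mount a swap-formatted disk (expected to fail)
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_safe_mode: false
    storage_volumes:
      - name: test1
        type: disk
        disks: "{{ unused_disks }}"
        fs_type: swap
        mount_point: /opt/test1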

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:26:26 -0500 (0:00:00.038)       0:02:21.437 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:26:26 -0500 (0:00:00.043)       0:02:21.481 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:26:26 -0500 (0:00:00.053)       0:02:21.535 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:26:26 -0500 (0:00:00.039)       0:02:21.574 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:26:26 -0500 (0:00:00.028)       0:02:21.603 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:26:26 -0500 (0:00:00.059)       0:02:21.662 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:26:26 -0500 (0:00:00.020)       0:02:21.682 ******** 
fatal: [managed-node3]: FAILED! => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

MSG:

volume 'test1' has a mount point but no mountable file system

TASK [fedora.linux_system_roles.storage : Failed message] **********************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:109
Monday 20 January 2025  06:26:27 -0500 (0:00:01.003)       0:02:22.686 ******** 
fatal: [managed-node3]: FAILED! => {
    "changed": false
}

MSG:

{'changed': False, 'actions': [], 'leaves': [], 'mounts': [], 'crypts': [], 'pools': [], 'volumes': [], 'packages': [], 'failed': True, 'msg': "volume 'test1' has a mount point but no mountable file system", 'invocation': {'module_args': {'pools': [], 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'swap', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': 10737418240, 'state': 'present', 'type': 'disk', 'disks': ['sda'], 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_stripe_size': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}], 'use_partitions': None, 'disklabel_type': None, 'pool_defaults': {'state': 'present', 'type': 'lvm', 'disks': [], 'volumes': [], 'grow_to_fill': False, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'shared': False}, 'volume_defaults': {'state': 'present', 'type': 'lvm', 'size': 0, 'disks': [], 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_stripe_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_password': None, 'encryption_key': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'compression': None, 'deduplication': None, 'vdo_pool_size': None, 'thin': None, 'thin_pool_name': None, 'thin_pool_size': None, 'cached': False, 'cache_size': 0, 'cache_mode': None, 'cache_devices': []}, 'safe_mode': False, 'packages_only': False, 'diskvolume_mkfs_option_map': {}}}, '_ansible_no_log': False}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:26:27 -0500 (0:00:00.058)       0:02:22.744 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that we failed in the role] ****************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:23
Monday 20 January 2025  06:26:27 -0500 (0:00:00.035)       0:02:22.780 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the blivet output and error message are correct] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:28
Monday 20 January 2025  06:26:27 -0500 (0:00:00.056)       0:02:22.837 ******** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify correct exception or error message] *******************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-failed.yml:39
Monday 20 January 2025  06:26:27 -0500 (0:00:00.093)       0:02:22.931 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_failed_exception is defined",
    "skip_reason": "Conditional result was False"
}
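
Annotation: the three verification tasks above ("Check that we failed in the role", "Verify the blivet output and error message are correct", "Verify correct exception or error message") suggest a block/rescue wrapper around the role call. The following is only a sketch of that pattern, an assumption about how verify-role-failed.yml is structured rather than its actual contents:

# Hedged sketch of a block/rescue failure check. The assert condition is
# illustrative; the real test may inspect different registered variables.
- name: Expect the role to fail on the swap + mount_point volume
  block:
    - name: Run the role with the failing volume definition
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
    - name: Fail the play if the role unexpectedly succeeded
      ansible.builtin.fail:
        msg: the role did not fail as expected
  rescue:
    - name: Verify the error message
      ansible.builtin.assert:
        that:
          - "'has a mount point but no mountable file system' in ansible_failed_result.msg"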

TASK [Remove the disk volume created above] ************************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:218
Monday 20 January 2025  06:26:27 -0500 (0:00:00.066)       0:02:22.998 ******** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Monday 20 January 2025  06:26:27 -0500 (0:00:00.206)       0:02:23.204 ******** 
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Monday 20 January 2025  06:26:27 -0500 (0:00:00.054)       0:02:23.259 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Monday 20 January 2025  06:26:28 -0500 (0:00:00.073)       0:02:23.332 ******** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=CentOS.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}
ok: [managed-node3] => (item=CentOS_9.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "vdo",
            "kmod-kvdo",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_9.yml"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Monday 20 January 2025  06:26:28 -0500 (0:00:00.126)       0:02:23.459 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Monday 20 January 2025  06:26:28 -0500 (0:00:00.052)       0:02:23.512 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Monday 20 January 2025  06:26:28 -0500 (0:00:00.048)       0:02:23.560 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Monday 20 January 2025  06:26:28 -0500 (0:00:00.041)       0:02:23.601 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Monday 20 January 2025  06:26:28 -0500 (0:00:00.044)       0:02:23.646 ******** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Monday 20 January 2025  06:26:28 -0500 (0:00:00.093)       0:02:23.740 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Monday 20 January 2025  06:26:28 -0500 (0:00:00.042)       0:02:23.782 ******** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Monday 20 January 2025  06:26:28 -0500 (0:00:00.043)       0:02:23.826 ******** 
ok: [managed-node3] => {
    "storage_volumes": [
        {
            "disks": [
                "sda"
            ],
            "name": "test1",
            "state": "absent",
            "type": "disk"
        }
    ]
}
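
Annotation: this second role run is the cleanup pass. The storage_volumes value above shows the same volume resubmitted with state: absent, which removes the device formatting and any fstab entry. A hedged reconstruction of that call (from the printed variable, not from tests_misc.yml) would be:

# Hedged sketch of the cleanup invocation; unused_disks is the fact set
# earlier in this run.
- name: Remove the disk volume created above
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.storage
  vars:
    storage_volumes:
      - name: test1
        type: disk
        disks: "{{ unused_disks }}"
        state: absent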

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Monday 20 January 2025  06:26:28 -0500 (0:00:00.046)       0:02:23.873 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Monday 20 January 2025  06:26:28 -0500 (0:00:00.063)       0:02:23.936 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Monday 20 January 2025  06:26:28 -0500 (0:00:00.053)       0:02:23.990 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Monday 20 January 2025  06:26:28 -0500 (0:00:00.042)       0:02:24.033 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Monday 20 January 2025  06:26:28 -0500 (0:00:00.042)       0:02:24.075 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Monday 20 January 2025  06:26:28 -0500 (0:00:00.193)       0:02:24.269 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Monday 20 January 2025  06:26:29 -0500 (0:00:00.121)       0:02:24.390 ******** 
changed: [managed-node3] => {
    "actions": [],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1"
    ],
    "mounts": [],
    "packages": [
        "xfsprogs"
    ],
    "pools": [],
    "volumes": [
        {
            "_device": "/dev/sda",
            "_mount_id": "/dev/sda",
            "_raw_device": "/dev/sda",
            "cache_devices": [],
            "cache_mode": null,
            "cache_size": 0,
            "cached": false,
            "compression": null,
            "deduplication": null,
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "fs_create_options": "",
            "fs_label": "",
            "fs_overwrite_existing": true,
            "fs_type": "xfs",
            "mount_check": 0,
            "mount_device_identifier": "uuid",
            "mount_group": null,
            "mount_mode": null,
            "mount_options": "defaults",
            "mount_passno": 0,
            "mount_point": null,
            "mount_user": null,
            "name": "test1",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "raid_stripe_size": null,
            "size": 10737418240,
            "state": "absent",
            "thin": null,
            "thin_pool_name": null,
            "thin_pool_size": null,
            "type": "disk",
            "vdo_pool_size": null
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Monday 20 January 2025  06:26:30 -0500 (0:00:01.071)       0:02:25.462 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Monday 20 January 2025  06:26:30 -0500 (0:00:00.129)       0:02:25.591 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737372380.2879567,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "85170829302ce3d8b3d8f3031aface16e161cfd6",
        "ctime": 1737372379.5599558,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 180355249,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1737372379.5599558,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "1487777884",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Monday 20 January 2025  06:26:30 -0500 (0:00:00.514)       0:02:26.106 ******** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Monday 20 January 2025  06:26:31 -0500 (0:00:00.486)       0:02:26.592 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Monday 20 January 2025  06:26:31 -0500 (0:00:00.046)       0:02:26.639 ******** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1"
        ],
        "mounts": [],
        "packages": [
            "xfsprogs"
        ],
        "pools": [],
        "volumes": [
            {
                "_device": "/dev/sda",
                "_mount_id": "/dev/sda",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "xfs",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Monday 20 January 2025  06:26:31 -0500 (0:00:00.058)       0:02:26.698 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Monday 20 January 2025  06:26:31 -0500 (0:00:00.059)       0:02:26.758 ******** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": [
            {
                "_device": "/dev/sda",
                "_mount_id": "/dev/sda",
                "_raw_device": "/dev/sda",
                "cache_devices": [],
                "cache_mode": null,
                "cache_size": 0,
                "cached": false,
                "compression": null,
                "deduplication": null,
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "fs_create_options": "",
                "fs_label": "",
                "fs_overwrite_existing": true,
                "fs_type": "xfs",
                "mount_check": 0,
                "mount_device_identifier": "uuid",
                "mount_group": null,
                "mount_mode": null,
                "mount_options": "defaults",
                "mount_passno": 0,
                "mount_point": null,
                "mount_user": null,
                "name": "test1",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "raid_stripe_size": null,
                "size": 10737418240,
                "state": "absent",
                "thin": null,
                "thin_pool_name": null,
                "thin_pool_size": null,
                "type": "disk",
                "vdo_pool_size": null
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Monday 20 January 2025  06:26:31 -0500 (0:00:00.043)       0:02:26.802 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Monday 20 January 2025  06:26:31 -0500 (0:00:00.070)       0:02:26.872 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Monday 20 January 2025  06:26:31 -0500 (0:00:00.037)       0:02:26.909 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Monday 20 January 2025  06:26:31 -0500 (0:00:00.102)       0:02:27.012 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Monday 20 January 2025  06:26:31 -0500 (0:00:00.112)       0:02:27.124 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Monday 20 January 2025  06:26:31 -0500 (0:00:00.062)       0:02:27.187 ******** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1737369493.8826442,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1734679556.747,
        "dev": 51713,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 4194436,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1734679277.524,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "850985565",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Monday 20 January 2025  06:26:32 -0500 (0:00:00.517)       0:02:27.705 ******** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Monday 20 January 2025  06:26:32 -0500 (0:00:00.068)       0:02:27.773 ******** 
ok: [managed-node3]

PLAY RECAP *********************************************************************
managed-node3              : ok=545  changed=17   unreachable=0    failed=0    skipped=556  rescued=6    ignored=0   


TASKS RECAP ********************************************************************
Monday 20 January 2025  06:26:33 -0500 (0:00:00.991)       0:02:28.765 ******** 
=============================================================================== 
fedora.linux_system_roles.storage : Get service facts ------------------- 1.96s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.75s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.75s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.73s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.71s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.61s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.60s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.60s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Make sure blivet is available ------- 1.53s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 
Ensure test packages ---------------------------------------------------- 1.53s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2 
Ensure cryptsetup is present -------------------------------------------- 1.53s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.52s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
Ensure cryptsetup is present -------------------------------------------- 1.52s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Gathering Facts --------------------------------------------------------- 1.51s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/tests_misc.yml:2 
Ensure cryptsetup is present -------------------------------------------- 1.50s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Read the /etc/fstab file for volume existence --------------------------- 1.45s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 
Ensure cryptsetup is present -------------------------------------------- 1.42s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 
Ensure test packages ---------------------------------------------------- 1.35s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2 
fedora.linux_system_roles.storage : Make sure required packages are installed --- 1.35s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.34s
/tmp/collections-6DZ/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69