ansible-playbook [core 2.17.8]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.13/site-packages/ansible
  ansible collection location = /tmp/collections-xpt
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.13.1 (main, Dec  9 2024, 00:00:00) [GCC 14.2.1 20240912 (Red Hat 14.2.1-3)] (/usr/bin/python3.13)
  jinja version = 3.1.5
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_stratis.yml ****************************************************
1 plays in /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml

PLAY [Test stratis pool management] ********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:2
Saturday 08 February 2025  18:30:45 -0500 (0:00:00.013)       0:00:00.013 ***** 
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node3]

TASK [Run the role] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:14
Saturday 08 February 2025  18:30:47 -0500 (0:00:01.398)       0:00:01.411 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:30:47 -0500 (0:00:00.024)       0:00:01.435 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:30:47 -0500 (0:00:00.025)       0:00:01.461 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:30:47 -0500 (0:00:00.030)       0:00:01.491 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:30:47 -0500 (0:00:00.040)       0:00:01.532 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:30:47 -0500 (0:00:00.495)       0:00:02.027 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__storage_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:30:47 -0500 (0:00:00.024)       0:00:02.052 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:30:47 -0500 (0:00:00.016)       0:00:02.068 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:30:47 -0500 (0:00:00.016)       0:00:02.085 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:30:47 -0500 (0:00:00.045)       0:00:02.130 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: libblockdev libblockdev-crypto libblockdev-dm libblockdev-fs libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:30:49 -0500 (0:00:01.525)       0:00:03.656 ***** 
ok: [managed-node3] => {
    "storage_pools": "VARIABLE IS NOT DEFINED!: 'storage_pools' is undefined"
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:30:49 -0500 (0:00:00.023)       0:00:03.679 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:30:49 -0500 (0:00:00.031)       0:00:03.711 ***** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:30:50 -0500 (0:00:00.837)       0:00:04.549 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2
Saturday 08 February 2025  18:30:50 -0500 (0:00:00.106)       0:00:04.656 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13
Saturday 08 February 2025  18:30:50 -0500 (0:00:00.063)       0:00:04.720 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "install_copr | d(false) | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19
Saturday 08 February 2025  18:30:50 -0500 (0:00:00.050)       0:00:04.770 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:30:50 -0500 (0:00:00.043)       0:00:04.814 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: kpartx

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:30:52 -0500 (0:00:01.473)       0:00:06.288 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "services": {
            "NetworkManager-dispatcher.service": {
                "name": "NetworkManager-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "NetworkManager-wait-online.service": {
                "name": "NetworkManager-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "NetworkManager.service": {
                "name": "NetworkManager.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "audit-rules.service": {
                "name": "audit-rules.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "auditd.service": {
                "name": "auditd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "auth-rpcgss-module.service": {
                "name": "auth-rpcgss-module.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "autovt@.service": {
                "name": "autovt@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "blivet.service": {
                "name": "blivet.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "blk-availability.service": {
                "name": "blk-availability.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "bluetooth.service": {
                "name": "bluetooth.service",
                "source": "systemd",
                "state": "inactive",
                "status": "enabled"
            },
            "capsule@.service": {
                "name": "capsule@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "chrony-wait.service": {
                "name": "chrony-wait.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd-restricted.service": {
                "name": "chronyd-restricted.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "chronyd.service": {
                "name": "chronyd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "cloud-config.service": {
                "name": "cloud-config.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-final.service": {
                "name": "cloud-final.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init-hotplugd.service": {
                "name": "cloud-init-hotplugd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "cloud-init-local.service": {
                "name": "cloud-init-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "cloud-init.service": {
                "name": "cloud-init.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "console-getty.service": {
                "name": "console-getty.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "container-getty@.service": {
                "name": "container-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "dbus-broker.service": {
                "name": "dbus-broker.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "dbus-org.bluez.service": {
                "name": "dbus-org.bluez.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.home1.service": {
                "name": "dbus-org.freedesktop.home1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.hostname1.service": {
                "name": "dbus-org.freedesktop.hostname1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.locale1.service": {
                "name": "dbus-org.freedesktop.locale1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.login1.service": {
                "name": "dbus-org.freedesktop.login1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.nm-dispatcher.service": {
                "name": "dbus-org.freedesktop.nm-dispatcher.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.oom1.service": {
                "name": "dbus-org.freedesktop.oom1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.portable1.service": {
                "name": "dbus-org.freedesktop.portable1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus-org.freedesktop.resolve1.service": {
                "name": "dbus-org.freedesktop.resolve1.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "dbus-org.freedesktop.timedate1.service": {
                "name": "dbus-org.freedesktop.timedate1.service",
                "source": "systemd",
                "state": "inactive",
                "status": "alias"
            },
            "dbus.service": {
                "name": "dbus.service",
                "source": "systemd",
                "state": "active",
                "status": "alias"
            },
            "debug-shell.service": {
                "name": "debug-shell.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd.service": {
                "name": "dhcpcd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dhcpcd@.service": {
                "name": "dhcpcd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "display-manager.service": {
                "name": "display-manager.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "dm-event.service": {
                "name": "dm-event.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "dnf-system-upgrade-cleanup.service": {
                "name": "dnf-system-upgrade-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf-system-upgrade.service": {
                "name": "dnf-system-upgrade.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dnf5-makecache.service": {
                "name": "dnf5-makecache.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf5-offline-transaction-cleanup.service": {
                "name": "dnf5-offline-transaction-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "dnf5-offline-transaction.service": {
                "name": "dnf5-offline-transaction.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "dracut-cmdline.service": {
                "name": "dracut-cmdline.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-initqueue.service": {
                "name": "dracut-initqueue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-mount.service": {
                "name": "dracut-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-mount.service": {
                "name": "dracut-pre-mount.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-pivot.service": {
                "name": "dracut-pre-pivot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-trigger.service": {
                "name": "dracut-pre-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-pre-udev.service": {
                "name": "dracut-pre-udev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown-onfailure.service": {
                "name": "dracut-shutdown-onfailure.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "dracut-shutdown.service": {
                "name": "dracut-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "emergency.service": {
                "name": "emergency.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fcoe.service": {
                "name": "fcoe.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "fips-crypto-policy-overlay.service": {
                "name": "fips-crypto-policy-overlay.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "firewalld.service": {
                "name": "firewalld.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fsidd.service": {
                "name": "fsidd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "fstrim.service": {
                "name": "fstrim.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "fwupd-offline-update.service": {
                "name": "fwupd-offline-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "fwupd-refresh.service": {
                "name": "fwupd-refresh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "fwupd.service": {
                "name": "fwupd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "getty@.service": {
                "name": "getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "enabled"
            },
            "getty@tty1.service": {
                "name": "getty@tty1.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "grub-boot-indeterminate.service": {
                "name": "grub-boot-indeterminate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "grub2-systemd-integration.service": {
                "name": "grub2-systemd-integration.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "gssproxy.service": {
                "name": "gssproxy.service",
                "source": "systemd",
                "state": "running",
                "status": "disabled"
            },
            "hv_kvp_daemon.service": {
                "name": "hv_kvp_daemon.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "initrd-cleanup.service": {
                "name": "initrd-cleanup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-parse-etc.service": {
                "name": "initrd-parse-etc.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-switch-root.service": {
                "name": "initrd-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "initrd-udevadm-cleanup-db.service": {
                "name": "initrd-udevadm-cleanup-db.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "iscsi-shutdown.service": {
                "name": "iscsi-shutdown.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsi.service": {
                "name": "iscsi.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "iscsid.service": {
                "name": "iscsid.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "kmod-static-nodes.service": {
                "name": "kmod-static-nodes.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "ldconfig.service": {
                "name": "ldconfig.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm-devices-import.service": {
                "name": "lvm-devices-import.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "lvm2-activation-early.service": {
                "name": "lvm2-activation-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "lvm2-lvmpolld.service": {
                "name": "lvm2-lvmpolld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "lvm2-monitor.service": {
                "name": "lvm2-monitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "man-db-cache-update.service": {
                "name": "man-db-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "man-db-restart-cache-update.service": {
                "name": "man-db-restart-cache-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "mdadm-grow-continue@.service": {
                "name": "mdadm-grow-continue@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdadm-last-resort@.service": {
                "name": "mdadm-last-resort@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdcheck_continue.service": {
                "name": "mdcheck_continue.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdcheck_start.service": {
                "name": "mdcheck_start.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmon@.service": {
                "name": "mdmon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "mdmonitor-oneshot.service": {
                "name": "mdmonitor-oneshot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "mdmonitor.service": {
                "name": "mdmonitor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "modprobe@.service": {
                "name": "modprobe@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "modprobe@configfs.service": {
                "name": "modprobe@configfs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_mod.service": {
                "name": "modprobe@dm_mod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@dm_multipath.service": {
                "name": "modprobe@dm_multipath.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@drm.service": {
                "name": "modprobe@drm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@efi_pstore.service": {
                "name": "modprobe@efi_pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@fuse.service": {
                "name": "modprobe@fuse.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "modprobe@loop.service": {
                "name": "modprobe@loop.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "multipathd.service": {
                "name": "multipathd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "network.service": {
                "name": "network.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "nfs-blkmap.service": {
                "name": "nfs-blkmap.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nfs-idmapd.service": {
                "name": "nfs-idmapd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-mountd.service": {
                "name": "nfs-mountd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfs-server.service": {
                "name": "nfs-server.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "nfs-utils.service": {
                "name": "nfs-utils.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nfsdcld.service": {
                "name": "nfsdcld.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "nftables.service": {
                "name": "nftables.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nis-domainname.service": {
                "name": "nis-domainname.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "nm-priv-helper.service": {
                "name": "nm-priv-helper.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "ntpd.service": {
                "name": "ntpd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ntpdate.service": {
                "name": "ntpdate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "pam_namespace.service": {
                "name": "pam_namespace.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "pcscd.service": {
                "name": "pcscd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "plymouth-halt.service": {
                "name": "plymouth-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-kexec.service": {
                "name": "plymouth-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-poweroff.service": {
                "name": "plymouth-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-quit-wait.service": {
                "name": "plymouth-quit-wait.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-quit.service": {
                "name": "plymouth-quit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-read-write.service": {
                "name": "plymouth-read-write.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-reboot.service": {
                "name": "plymouth-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-start.service": {
                "name": "plymouth-start.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "plymouth-switch-root-initramfs.service": {
                "name": "plymouth-switch-root-initramfs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "plymouth-switch-root.service": {
                "name": "plymouth-switch-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "polkit.service": {
                "name": "polkit.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "quotaon-root.service": {
                "name": "quotaon-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "quotaon@.service": {
                "name": "quotaon@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "raid-check.service": {
                "name": "raid-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rbdmap.service": {
                "name": "rbdmap.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rc-local.service": {
                "name": "rc-local.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rescue.service": {
                "name": "rescue.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "restraintd.service": {
                "name": "restraintd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rngd.service": {
                "name": "rngd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "rpc-gssd.service": {
                "name": "rpc-gssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd-notify.service": {
                "name": "rpc-statd-notify.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-statd.service": {
                "name": "rpc-statd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "rpc-svcgssd.service": {
                "name": "rpc-svcgssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "rpcbind.service": {
                "name": "rpcbind.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "rpmdb-migrate.service": {
                "name": "rpmdb-migrate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "rpmdb-rebuild.service": {
                "name": "rpmdb-rebuild.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "selinux-autorelabel-mark.service": {
                "name": "selinux-autorelabel-mark.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "selinux-autorelabel.service": {
                "name": "selinux-autorelabel.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "selinux-check-proper-disable.service": {
                "name": "selinux-check-proper-disable.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "serial-getty@.service": {
                "name": "serial-getty@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "serial-getty@ttyS0.service": {
                "name": "serial-getty@ttyS0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            },
            "sntp.service": {
                "name": "sntp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "ssh-host-keys-migration.service": {
                "name": "ssh-host-keys-migration.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "sshd-keygen.service": {
                "name": "sshd-keygen.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "sshd-keygen@.service": {
                "name": "sshd-keygen@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "sshd-keygen@ecdsa.service": {
                "name": "sshd-keygen@ecdsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@ed25519.service": {
                "name": "sshd-keygen@ed25519.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-keygen@rsa.service": {
                "name": "sshd-keygen@rsa.service",
                "source": "systemd",
                "state": "stopped",
                "status": "inactive"
            },
            "sshd-unix-local@.service": {
                "name": "sshd-unix-local@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "sshd-vsock@.service": {
                "name": "sshd-vsock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "alias"
            },
            "sshd.service": {
                "name": "sshd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "sshd@.service": {
                "name": "sshd@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "indirect"
            },
            "sssd-autofs.service": {
                "name": "sssd-autofs.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-kcm.service": {
                "name": "sssd-kcm.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "sssd-nss.service": {
                "name": "sssd-nss.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pac.service": {
                "name": "sssd-pac.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-pam.service": {
                "name": "sssd-pam.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-ssh.service": {
                "name": "sssd-ssh.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd-sudo.service": {
                "name": "sssd-sudo.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "sssd.service": {
                "name": "sssd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "stratis-fstab-setup@.service": {
                "name": "stratis-fstab-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "stratisd-min-postinitrd.service": {
                "name": "stratisd-min-postinitrd.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "stratisd.service": {
                "name": "stratisd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "syslog.service": {
                "name": "syslog.service",
                "source": "systemd",
                "state": "stopped",
                "status": "not-found"
            },
            "system-update-cleanup.service": {
                "name": "system-update-cleanup.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-ask-password-console.service": {
                "name": "systemd-ask-password-console.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-plymouth.service": {
                "name": "systemd-ask-password-plymouth.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-ask-password-wall.service": {
                "name": "systemd-ask-password-wall.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-backlight@.service": {
                "name": "systemd-backlight@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-battery-check.service": {
                "name": "systemd-battery-check.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-binfmt.service": {
                "name": "systemd-binfmt.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-bless-boot.service": {
                "name": "systemd-bless-boot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-boot-check-no-failures.service": {
                "name": "systemd-boot-check-no-failures.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-boot-random-seed.service": {
                "name": "systemd-boot-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-boot-update.service": {
                "name": "systemd-boot-update.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-bootctl@.service": {
                "name": "systemd-bootctl@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-bsod.service": {
                "name": "systemd-bsod.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-confext.service": {
                "name": "systemd-confext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-coredump@.service": {
                "name": "systemd-coredump@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-creds@.service": {
                "name": "systemd-creds@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-exit.service": {
                "name": "systemd-exit.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-firstboot.service": {
                "name": "systemd-firstboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-fsck-root.service": {
                "name": "systemd-fsck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-fsck@.service": {
                "name": "systemd-fsck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-growfs-root.service": {
                "name": "systemd-growfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-growfs@.service": {
                "name": "systemd-growfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-halt.service": {
                "name": "systemd-halt.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-hibernate-clear.service": {
                "name": "systemd-hibernate-clear.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate-resume.service": {
                "name": "systemd-hibernate-resume.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hibernate.service": {
                "name": "systemd-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-homed-activate.service": {
                "name": "systemd-homed-activate.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-homed-firstboot.service": {
                "name": "systemd-homed-firstboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-homed.service": {
                "name": "systemd-homed.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "systemd-hostnamed.service": {
                "name": "systemd-hostnamed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hwdb-update.service": {
                "name": "systemd-hwdb-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-hybrid-sleep.service": {
                "name": "systemd-hybrid-sleep.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-initctl.service": {
                "name": "systemd-initctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-catalog-update.service": {
                "name": "systemd-journal-catalog-update.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journal-flush.service": {
                "name": "systemd-journal-flush.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-journald-sync@.service": {
                "name": "systemd-journald-sync@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-journald.service": {
                "name": "systemd-journald.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-journald@.service": {
                "name": "systemd-journald@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-kexec.service": {
                "name": "systemd-kexec.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-localed.service": {
                "name": "systemd-localed.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-logind.service": {
                "name": "systemd-logind.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-machine-id-commit.service": {
                "name": "systemd-machine-id-commit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-modules-load.service": {
                "name": "systemd-modules-load.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-mountfsd.service": {
                "name": "systemd-mountfsd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "indirect"
            },
            "systemd-network-generator.service": {
                "name": "systemd-network-generator.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-networkd-persistent-storage.service": {
                "name": "systemd-networkd-persistent-storage.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-networkd-wait-online.service": {
                "name": "systemd-networkd-wait-online.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-networkd-wait-online@.service": {
                "name": "systemd-networkd-wait-online@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "disabled"
            },
            "systemd-networkd.service": {
                "name": "systemd-networkd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-nsresourced.service": {
                "name": "systemd-nsresourced.service",
                "source": "systemd",
                "state": "running",
                "status": "indirect"
            },
            "systemd-oomd.service": {
                "name": "systemd-oomd.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "systemd-pcrextend@.service": {
                "name": "systemd-pcrextend@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrfs-root.service": {
                "name": "systemd-pcrfs-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pcrfs@.service": {
                "name": "systemd-pcrfs@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrlock-file-system.service": {
                "name": "systemd-pcrlock-file-system.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-code.service": {
                "name": "systemd-pcrlock-firmware-code.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-firmware-config.service": {
                "name": "systemd-pcrlock-firmware-config.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-machine-id.service": {
                "name": "systemd-pcrlock-machine-id.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-make-policy.service": {
                "name": "systemd-pcrlock-make-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-authority.service": {
                "name": "systemd-pcrlock-secureboot-authority.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock-secureboot-policy.service": {
                "name": "systemd-pcrlock-secureboot-policy.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-pcrlock@.service": {
                "name": "systemd-pcrlock@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-pcrmachine.service": {
                "name": "systemd-pcrmachine.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-initrd.service": {
                "name": "systemd-pcrphase-initrd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase-sysinit.service": {
                "name": "systemd-pcrphase-sysinit.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-pcrphase.service": {
                "name": "systemd-pcrphase.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-portabled.service": {
                "name": "systemd-portabled.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-poweroff.service": {
                "name": "systemd-poweroff.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-pstore.service": {
                "name": "systemd-pstore.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-quotacheck-root.service": {
                "name": "systemd-quotacheck-root.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-quotacheck@.service": {
                "name": "systemd-quotacheck@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-random-seed.service": {
                "name": "systemd-random-seed.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-reboot.service": {
                "name": "systemd-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-remount-fs.service": {
                "name": "systemd-remount-fs.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled-runtime"
            },
            "systemd-repart.service": {
                "name": "systemd-repart.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-resolved.service": {
                "name": "systemd-resolved.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "systemd-rfkill.service": {
                "name": "systemd-rfkill.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-soft-reboot.service": {
                "name": "systemd-soft-reboot.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-storagetm.service": {
                "name": "systemd-storagetm.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend-then-hibernate.service": {
                "name": "systemd-suspend-then-hibernate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-suspend.service": {
                "name": "systemd-suspend.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-sysctl.service": {
                "name": "systemd-sysctl.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-sysext.service": {
                "name": "systemd-sysext.service",
                "source": "systemd",
                "state": "stopped",
                "status": "enabled"
            },
            "systemd-sysext@.service": {
                "name": "systemd-sysext@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-sysupdate-reboot.service": {
                "name": "systemd-sysupdate-reboot.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysupdate.service": {
                "name": "systemd-sysupdate.service",
                "source": "systemd",
                "state": "inactive",
                "status": "indirect"
            },
            "systemd-sysusers.service": {
                "name": "systemd-sysusers.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-time-wait-sync.service": {
                "name": "systemd-time-wait-sync.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "systemd-timedated.service": {
                "name": "systemd-timedated.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-timesyncd.service": {
                "name": "systemd-timesyncd.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-tmpfiles-clean.service": {
                "name": "systemd-tmpfiles-clean.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev-early.service": {
                "name": "systemd-tmpfiles-setup-dev-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup-dev.service": {
                "name": "systemd-tmpfiles-setup-dev.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tmpfiles-setup.service": {
                "name": "systemd-tmpfiles-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup-early.service": {
                "name": "systemd-tpm2-setup-early.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-tpm2-setup.service": {
                "name": "systemd-tpm2-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-load-credentials.service": {
                "name": "systemd-udev-load-credentials.service",
                "source": "systemd",
                "state": "stopped",
                "status": "disabled"
            },
            "systemd-udev-settle.service": {
                "name": "systemd-udev-settle.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udev-trigger.service": {
                "name": "systemd-udev-trigger.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-udevd.service": {
                "name": "systemd-udevd.service",
                "source": "systemd",
                "state": "running",
                "status": "static"
            },
            "systemd-update-done.service": {
                "name": "systemd-update-done.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp-runlevel.service": {
                "name": "systemd-update-utmp-runlevel.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-update-utmp.service": {
                "name": "systemd-update-utmp.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-user-sessions.service": {
                "name": "systemd-user-sessions.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-userdbd.service": {
                "name": "systemd-userdbd.service",
                "source": "systemd",
                "state": "running",
                "status": "indirect"
            },
            "systemd-vconsole-setup.service": {
                "name": "systemd-vconsole-setup.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "systemd-volatile-root.service": {
                "name": "systemd-volatile-root.service",
                "source": "systemd",
                "state": "inactive",
                "status": "static"
            },
            "systemd-zram-setup@.service": {
                "name": "systemd-zram-setup@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "systemd-zram-setup@zram0.service": {
                "name": "systemd-zram-setup@zram0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "target.service": {
                "name": "target.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "targetclid.service": {
                "name": "targetclid.service",
                "source": "systemd",
                "state": "inactive",
                "status": "disabled"
            },
            "udisks2.service": {
                "name": "udisks2.service",
                "source": "systemd",
                "state": "running",
                "status": "enabled"
            },
            "unbound-anchor.service": {
                "name": "unbound-anchor.service",
                "source": "systemd",
                "state": "stopped",
                "status": "static"
            },
            "user-runtime-dir@.service": {
                "name": "user-runtime-dir@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user-runtime-dir@0.service": {
                "name": "user-runtime-dir@0.service",
                "source": "systemd",
                "state": "stopped",
                "status": "active"
            },
            "user@.service": {
                "name": "user@.service",
                "source": "systemd",
                "state": "unknown",
                "status": "static"
            },
            "user@0.service": {
                "name": "user@0.service",
                "source": "systemd",
                "state": "running",
                "status": "active"
            }
        }
    },
    "changed": false
}
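
The service inventory above is the output of the role's service-facts step; judging from the task that follows, it feeds the construction of storage_cryptsetup_services. A minimal sketch of collecting the same data outside the role (ansible.builtin.service_facts is the real module; the debug lookup is only illustrative):

- name: Collect service facts
  ansible.builtin.service_facts:

- name: Inspect one entry from the resulting dict
  ansible.builtin.debug:
    var: ansible_facts.services['systemd-udevd.service']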

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:30:55 -0500 (0:00:03.091)       0:00:09.379 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}
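
storage_cryptsetup_services ends up empty because no matching units appear in the facts gathered above. A hedged sketch of deriving such a list with set_fact (the Jinja filter chain is illustrative, not necessarily the role's exact expression):

- name: Build a list of systemd cryptsetup units (illustrative)
  ansible.builtin.set_fact:
    storage_cryptsetup_services: >-
      {{ ansible_facts.services | dict2items
         | selectattr('key', 'match', '^systemd-cryptsetup@')
         | map(attribute='key') | list }}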

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:30:55 -0500 (0:00:00.047)       0:00:09.427 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}
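
With an empty list the mask task is skipped ("No items in the list"). When units are present, masking is a one-parameter systemd operation; a sketch using the real ansible.builtin.systemd_service module:

- name: Mask cryptsetup units for the duration of the storage changes
  ansible.builtin.systemd_service:
    name: "{{ item }}"
    masked: true
  loop: "{{ storage_cryptsetup_services }}"

The "Unmask the systemd cryptsetup services" task later in this run is the counterpart, setting masked: false.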

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:30:55 -0500 (0:00:00.016)       0:00:09.444 ***** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [],
    "mounts": [],
    "packages": [],
    "pools": [],
    "volumes": []
}
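
This is the role's central blivet call. Because the test invoked the role without defining any pools or volumes, every return key (actions, crypts, leaves, mounts, packages, pools, volumes) is empty and changed is false. Roughly what the test's earlier "Run the role" step amounts to; include_role is real, and the no-op behavior comes simply from passing no pool or volume variables:

- name: Run the storage role as a no-op sanity check
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.storage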

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:30:55 -0500 (0:00:00.605)       0:00:10.049 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:30:55 -0500 (0:00:00.032)       0:00:10.082 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057334.8215806,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "58c9cf35b6a5bb13136caa97ec2cf1f888ff31f6",
        "ctime": 1739057334.8205807,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057334.8205807,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
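
The stat result confirms /etc/fstab exists (1366 bytes, mode 0644); the checksum field is the stat module's default SHA-1 digest. The follow-up "Add fingerprint" task is conditioned on blivet_output is changed, so it is skipped on this no-op run. A minimal equivalent check:

- name: Check whether /etc/fstab is present
  ansible.builtin.stat:
    path: /etc/fstab
  register: fstab_stat  # variable name is illustrative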

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.446)       0:00:10.528 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.037)       0:00:10.566 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.028)       0:00:10.595 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [],
        "mounts": [],
        "packages": [],
        "pools": [],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.048)       0:00:10.643 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.051)       0:00:10.694 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.049)       0:00:10.744 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.119)       0:00:10.864 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.054)       0:00:10.918 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.056)       0:00:10.975 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.073)       0:00:11.049 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}
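
Both "Tell systemd to refresh its view of /etc/fstab" tasks are gated on blivet_output['mounts'] being non-empty; since no mounts changed here, no reload is needed. When the condition does hold, the operation boils down to a daemon reload; a sketch with the real module parameter:

- name: Ask systemd to re-read unit definitions generated from /etc/fstab
  ansible.builtin.systemd_service:
    daemon_reload: true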

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:30:56 -0500 (0:00:00.035)       0:00:11.084 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
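
size 0, mimetype inode/x-empty, and checksum da39a3ee5e6b4b0d3255bfef95601890afd80709 (the SHA-1 of zero bytes) all say the same thing: /etc/crypttab exists but is empty, so the crypttab-management task that follows has nothing to iterate over. An illustrative assertion, where crypttab_stat is a hypothetical registered variable:

- name: Confirm /etc/crypttab is empty (illustrative)
  ansible.builtin.assert:
    that:
      - crypttab_stat.stat.exists
      - crypttab_stat.stat.size == 0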

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:30:57 -0500 (0:00:00.465)       0:00:11.549 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:30:57 -0500 (0:00:00.017)       0:00:11.567 ***** 
ok: [managed-node3]
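
The closing "Update facts" step re-gathers facts so that later verification tasks see post-change device and mount state; on this no-op run it simply refreshes the cache. Functionally it is comparable to a plain setup call:

- name: Refresh facts after storage changes
  ansible.builtin.setup: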

TASK [Mark tasks to be skipped] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:18
Saturday 08 February 2025  18:30:58 -0500 (0:00:00.947)       0:00:12.514 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_skip_checks": [
            "blivet_available",
            "packages_installed",
            "service_facts"
        ]
    },
    "changed": false
}
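
storage_skip_checks is a test-suite convenience: later invocations of the role in this play skip the blivet-availability probe, package installation, and the comparatively expensive service-facts scan (about 3 seconds earlier in this log). A sketch of how such a guard might look; the condition shown is illustrative, not the role's exact code:

- name: Example guard honoring storage_skip_checks (illustrative)
  ansible.builtin.service_facts:
  when: "'service_facts' not in (storage_skip_checks | d([]))"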

TASK [Gather package facts] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:25
Saturday 08 February 2025  18:30:58 -0500 (0:00:00.043)       0:00:12.558 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "packages": {
            "ModemManager-glib": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ModemManager-glib",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.22.0"
                }
            ],
            "NetworkManager": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "NetworkManager",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.50.2"
                }
            ],
            "NetworkManager-libnm": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "NetworkManager-libnm",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.50.2"
                }
            ],
            "abattis-cantarell-vf-fonts": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "abattis-cantarell-vf-fonts",
                    "release": "13.fc41",
                    "source": "rpm",
                    "version": "0.301"
                }
            ],
            "alternatives": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "alternatives",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.31"
                }
            ],
            "amd-gpu-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "amd-gpu-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "amd-ucode-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "amd-ucode-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "aspell": [
                {
                    "arch": "x86_64",
                    "epoch": 12,
                    "name": "aspell",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.60.8.1"
                }
            ],
            "aspell-en": [
                {
                    "arch": "x86_64",
                    "epoch": 50,
                    "name": "aspell-en",
                    "release": "11.fc41",
                    "source": "rpm",
                    "version": "2020.12.07"
                }
            ],
            "atheros-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "atheros-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "audit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "audit",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.0.3"
                }
            ],
            "audit-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "audit-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.0.3"
                }
            ],
            "audit-rules": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "audit-rules",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.0.3"
                }
            ],
            "authselect": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "authselect",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.5.0"
                }
            ],
            "authselect-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "authselect-libs",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.5.0"
                }
            ],
            "avahi-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "avahi-libs",
                    "release": "29.fc41",
                    "source": "rpm",
                    "version": "0.8"
                }
            ],
            "basesystem": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "basesystem",
                    "release": "21.fc41",
                    "source": "rpm",
                    "version": "11"
                }
            ],
            "bash": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "bash",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "5.2.32"
                }
            ],
            "bash-completion": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "bash-completion",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.13"
                }
            ],
            "bc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "bc",
                    "release": "22.fc41",
                    "source": "rpm",
                    "version": "1.07.1"
                }
            ],
            "beakerlib": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "beakerlib",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.31.3"
                }
            ],
            "beakerlib-redhat": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "beakerlib-redhat",
                    "release": "35.fc41eng",
                    "source": "rpm",
                    "version": "1"
                }
            ],
            "binutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "binutils",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "2.43.1"
                }
            ],
            "bison": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "bison",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "3.8.2"
                }
            ],
            "blivet-data": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "blivet-data",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "3.11.0"
                }
            ],
            "bluez": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "bluez",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "5.79"
                }
            ],
            "boost-atomic": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "boost-atomic",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.83.0"
                }
            ],
            "boost-filesystem": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "boost-filesystem",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.83.0"
                }
            ],
            "boost-system": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "boost-system",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.83.0"
                }
            ],
            "boost-thread": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "boost-thread",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.83.0"
                }
            ],
            "brcmfmac-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "brcmfmac-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "btrfs-progs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "btrfs-progs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "6.12"
                }
            ],
            "bzip2-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "bzip2-libs",
                    "release": "19.fc41",
                    "source": "rpm",
                    "version": "1.0.8"
                }
            ],
            "c-ares": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "c-ares",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.34.4"
                }
            ],
            "ca-certificates": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "ca-certificates",
                    "release": "1.0.fc41",
                    "source": "rpm",
                    "version": "2024.2.69_v8.0.401"
                }
            ],
            "checkpolicy": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "checkpolicy",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "chrony": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "chrony",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.6.1"
                }
            ],
            "cirrus-audio-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "cirrus-audio-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "clevis": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "clevis",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "21"
                }
            ],
            "clevis-luks": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "clevis-luks",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "21"
                }
            ],
            "clevis-pin-tpm2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "clevis-pin-tpm2",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "0.5.3"
                }
            ],
            "cloud-init": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "cloud-init",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "24.2"
                }
            ],
            "cloud-utils-growpart": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "cloud-utils-growpart",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "0.33"
                }
            ],
            "cmake-filesystem": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cmake-filesystem",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.30.7"
                }
            ],
            "coreutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "coreutils",
                    "release": "11.fc41",
                    "source": "rpm",
                    "version": "9.5"
                }
            ],
            "coreutils-common": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "coreutils-common",
                    "release": "11.fc41",
                    "source": "rpm",
                    "version": "9.5"
                }
            ],
            "cpio": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cpio",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.15"
                }
            ],
            "cpp": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cpp",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "14.2.1"
                }
            ],
            "cracklib": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cracklib",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "2.9.11"
                }
            ],
            "cracklib-dicts": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cracklib-dicts",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "2.9.11"
                }
            ],
            "crypto-policies": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "crypto-policies",
                    "release": "1.git4d262e7.fc41",
                    "source": "rpm",
                    "version": "20250124"
                }
            ],
            "crypto-policies-scripts": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "crypto-policies-scripts",
                    "release": "1.git4d262e7.fc41",
                    "source": "rpm",
                    "version": "20250124"
                }
            ],
            "cryptsetup": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cryptsetup",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.7.5"
                }
            ],
            "cryptsetup-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cryptsetup-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.7.5"
                }
            ],
            "curl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "curl",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "8.9.1"
                }
            ],
            "cyrus-sasl-gssapi": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cyrus-sasl-gssapi",
                    "release": "27.fc41",
                    "source": "rpm",
                    "version": "2.1.28"
                }
            ],
            "cyrus-sasl-lib": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "cyrus-sasl-lib",
                    "release": "27.fc41",
                    "source": "rpm",
                    "version": "2.1.28"
                }
            ],
            "dbus": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "dbus",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.14.10"
                }
            ],
            "dbus-broker": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dbus-broker",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "36"
                }
            ],
            "dbus-common": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "dbus-common",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.14.10"
                }
            ],
            "dbus-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "dbus-libs",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.14.10"
                }
            ],
            "default-fonts-core-sans": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "default-fonts-core-sans",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "4.1"
                }
            ],
            "device-mapper": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.02.199"
                }
            ],
            "device-mapper-event": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper-event",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.02.199"
                }
            ],
            "device-mapper-event-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper-event-libs",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.02.199"
                }
            ],
            "device-mapper-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper-libs",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.02.199"
                }
            ],
            "device-mapper-multipath": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper-multipath",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "0.9.9"
                }
            ],
            "device-mapper-multipath-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper-multipath-libs",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "0.9.9"
                }
            ],
            "device-mapper-persistent-data": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "device-mapper-persistent-data",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "1.0.12"
                }
            ],
            "dhcp-client": [
                {
                    "arch": "x86_64",
                    "epoch": 12,
                    "name": "dhcp-client",
                    "release": "14.P1.fc41",
                    "source": "rpm",
                    "version": "4.4.3"
                }
            ],
            "dhcp-common": [
                {
                    "arch": "noarch",
                    "epoch": 12,
                    "name": "dhcp-common",
                    "release": "14.P1.fc41",
                    "source": "rpm",
                    "version": "4.4.3"
                }
            ],
            "dhcpcd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dhcpcd",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "10.1.0"
                }
            ],
            "diffutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "diffutils",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "3.10"
                }
            ],
            "dnf-data": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "dnf-data",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "4.22.0"
                }
            ],
            "dnf-plugins-core": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "dnf-plugins-core",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.10.0"
                }
            ],
            "dnf-utils": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "dnf-utils",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.10.0"
                }
            ],
            "dnf5": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dnf5",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "5.2.8.1"
                }
            ],
            "dnf5-plugins": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dnf5-plugins",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "5.2.8.1"
                }
            ],
            "dosfstools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dosfstools",
                    "release": "13.fc41",
                    "source": "rpm",
                    "version": "4.2"
                }
            ],
            "dracut": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dracut",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "103"
                }
            ],
            "dracut-config-rescue": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dracut-config-rescue",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "103"
                }
            ],
            "duktape": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "duktape",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "2.7.0"
                }
            ],
            "dyninst": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "dyninst",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "12.3.0"
                }
            ],
            "e2fsprogs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "e2fsprogs",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.47.1"
                }
            ],
            "e2fsprogs-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "e2fsprogs-libs",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.47.1"
                }
            ],
            "efivar-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "efivar-libs",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "39"
                }
            ],
            "elfutils-debuginfod-client": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-debuginfod-client",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.192"
                }
            ],
            "elfutils-debuginfod-client-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-debuginfod-client-devel",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.192"
                }
            ],
            "elfutils-default-yama-scope": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "elfutils-default-yama-scope",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.192"
                }
            ],
            "elfutils-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-devel",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.192"
                }
            ],
            "elfutils-libelf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-libelf",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.192"
                }
            ],
            "elfutils-libelf-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-libelf-devel",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.192"
                }
            ],
            "elfutils-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "elfutils-libs",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.192"
                }
            ],
            "exfatprogs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "exfatprogs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.2.6"
                }
            ],
            "expat": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "expat",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.6.4"
                }
            ],
            "fedora-gpg-keys": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "fedora-gpg-keys",
                    "release": "1",
                    "source": "rpm",
                    "version": "41"
                }
            ],
            "fedora-release": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "fedora-release",
                    "release": "29",
                    "source": "rpm",
                    "version": "41"
                }
            ],
            "fedora-release-common": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "fedora-release-common",
                    "release": "29",
                    "source": "rpm",
                    "version": "41"
                }
            ],
            "fedora-release-identity-basic": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "fedora-release-identity-basic",
                    "release": "29",
                    "source": "rpm",
                    "version": "41"
                }
            ],
            "fedora-repos": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "fedora-repos",
                    "release": "1",
                    "source": "rpm",
                    "version": "41"
                }
            ],
            "file": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "file",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "5.45"
                }
            ],
            "file-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "file-libs",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "5.45"
                }
            ],
            "filesystem": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "filesystem",
                    "release": "23.fc41",
                    "source": "rpm",
                    "version": "3.18"
                }
            ],
            "findutils": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "findutils",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "4.10.0"
                }
            ],
            "firewalld": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "firewalld",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.2.3"
                }
            ],
            "firewalld-filesystem": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "firewalld-filesystem",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.2.3"
                }
            ],
            "flex": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "flex",
                    "release": "18.fc41",
                    "source": "rpm",
                    "version": "2.6.4"
                }
            ],
            "fmt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "fmt",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "11.0.2"
                }
            ],
            "fonts-filesystem": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "fonts-filesystem",
                    "release": "17.fc41",
                    "source": "rpm",
                    "version": "2.0.5"
                }
            ],
            "fuse3-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "fuse3-libs",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "3.16.2"
                }
            ],
            "fwupd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "fwupd",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.9.28"
                }
            ],
            "fwupd-plugin-modem-manager": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "fwupd-plugin-modem-manager",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.9.28"
                }
            ],
            "fwupd-plugin-uefi-capsule-data": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "fwupd-plugin-uefi-capsule-data",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.9.28"
                }
            ],
            "gawk": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gawk",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "5.3.0"
                }
            ],
            "gcc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gcc",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "14.2.1"
                }
            ],
            "gdbm": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "gdbm",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "1.23"
                }
            ],
            "gdbm-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "gdbm-libs",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "1.23"
                }
            ],
            "gdisk": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gdisk",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.0.10"
                }
            ],
            "gettext-envsubst": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gettext-envsubst",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "0.22.5"
                }
            ],
            "gettext-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gettext-libs",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "0.22.5"
                }
            ],
            "gettext-runtime": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gettext-runtime",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "0.22.5"
                }
            ],
            "git": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "git",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.48.1"
                }
            ],
            "git-core": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "git-core",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.48.1"
                }
            ],
            "git-core-doc": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "git-core-doc",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.48.1"
                }
            ],
            "glib2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glib2",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.82.2"
                }
            ],
            "glibc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc",
                    "release": "21.fc41",
                    "source": "rpm",
                    "version": "2.40"
                }
            ],
            "glibc-common": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc-common",
                    "release": "21.fc41",
                    "source": "rpm",
                    "version": "2.40"
                }
            ],
            "glibc-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc-devel",
                    "release": "21.fc41",
                    "source": "rpm",
                    "version": "2.40"
                }
            ],
            "glibc-gconv-extra": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc-gconv-extra",
                    "release": "21.fc41",
                    "source": "rpm",
                    "version": "2.40"
                }
            ],
            "glibc-langpack-en": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "glibc-langpack-en",
                    "release": "21.fc41",
                    "source": "rpm",
                    "version": "2.40"
                }
            ],
            "gmp": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "gmp",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "6.3.0"
                }
            ],
            "gnupg2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gnupg2",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "2.4.5"
                }
            ],
            "gnutls": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gnutls",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "3.8.8"
                }
            ],
            "gnutls-dane": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gnutls-dane",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "3.8.8"
                }
            ],
            "gobject-introspection": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gobject-introspection",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.82.0"
                }
            ],
            "google-noto-fonts-common": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "google-noto-fonts-common",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "20240701"
                }
            ],
            "google-noto-sans-mono-vf-fonts": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "google-noto-sans-mono-vf-fonts",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "20240701"
                }
            ],
            "google-noto-sans-vf-fonts": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "google-noto-sans-vf-fonts",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "20240701"
                }
            ],
            "google-noto-serif-vf-fonts": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "google-noto-serif-vf-fonts",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "20240701"
                }
            ],
            "gpg-pubkey": [
                {
                    "arch": null,
                    "epoch": null,
                    "name": "gpg-pubkey",
                    "release": "64d2612c",
                    "source": "rpm",
                    "version": "e99d6ad1"
                },
                {
                    "arch": null,
                    "epoch": null,
                    "name": "gpg-pubkey",
                    "release": "5e67a958",
                    "source": "rpm",
                    "version": "3a97f4df"
                }
            ],
            "gpgme": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gpgme",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "1.23.2"
                }
            ],
            "gpm-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gpm-libs",
                    "release": "48.fc41",
                    "source": "rpm",
                    "version": "1.20.7"
                }
            ],
            "grep": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "grep",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "3.11"
                }
            ],
            "groff-base": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "groff-base",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "1.23.0"
                }
            ],
            "grub2-common": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "grub2-common",
                    "release": "15.fc41",
                    "source": "rpm",
                    "version": "2.12"
                }
            ],
            "grub2-pc": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "grub2-pc",
                    "release": "15.fc41",
                    "source": "rpm",
                    "version": "2.12"
                }
            ],
            "grub2-pc-modules": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "grub2-pc-modules",
                    "release": "15.fc41",
                    "source": "rpm",
                    "version": "2.12"
                }
            ],
            "grub2-tools": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "grub2-tools",
                    "release": "15.fc41",
                    "source": "rpm",
                    "version": "2.12"
                }
            ],
            "grub2-tools-minimal": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "grub2-tools-minimal",
                    "release": "15.fc41",
                    "source": "rpm",
                    "version": "2.12"
                }
            ],
            "grubby": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "grubby",
                    "release": "78.fc41",
                    "source": "rpm",
                    "version": "8.40"
                }
            ],
            "gssproxy": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gssproxy",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.9.2"
                }
            ],
            "gzip": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "gzip",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.13"
                }
            ],
            "hiredis": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "hiredis",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "1.2.0"
                }
            ],
            "hostname": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "hostname",
                    "release": "13.fc41",
                    "source": "rpm",
                    "version": "3.23"
                }
            ],
            "hunspell": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "hunspell",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.7.2"
                }
            ],
            "hunspell-en": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "hunspell-en",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "0.20201207"
                }
            ],
            "hunspell-en-GB": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "hunspell-en-GB",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "0.20201207"
                }
            ],
            "hunspell-en-US": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "hunspell-en-US",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "0.20201207"
                }
            ],
            "hunspell-filesystem": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "hunspell-filesystem",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.7.2"
                }
            ],
            "ima-evm-utils-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ima-evm-utils-libs",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.6.2"
                }
            ],
            "inih": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "inih",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "58"
                }
            ],
            "initscripts-service": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "initscripts-service",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "10.26"
                }
            ],
            "intel-audio-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "intel-audio-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "intel-gpu-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "intel-gpu-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "ipcalc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ipcalc",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "1.0.3"
                }
            ],
            "iproute": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "iproute",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "6.10.0"
                }
            ],
            "iptables-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "iptables-libs",
                    "release": "15.fc41",
                    "source": "rpm",
                    "version": "1.8.10"
                }
            ],
            "iptables-nft": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "iptables-nft",
                    "release": "15.fc41",
                    "source": "rpm",
                    "version": "1.8.10"
                }
            ],
            "iputils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "iputils",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20240905"
                }
            ],
            "jansson": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "jansson",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "2.13.1"
                }
            ],
            "jitterentropy": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "jitterentropy",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.6.0"
                }
            ],
            "jose": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "jose",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "14"
                }
            ],
            "jq": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "jq",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.7.1"
                }
            ],
            "json-c": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "json-c",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "0.17"
                }
            ],
            "json-glib": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "json-glib",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.10.0"
                }
            ],
            "kbd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kbd",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "2.6.4"
                }
            ],
            "kbd-legacy": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "kbd-legacy",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "2.6.4"
                }
            ],
            "kbd-misc": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "kbd-misc",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "2.6.4"
                }
            ],
            "kernel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel",
                    "release": "200.fc41",
                    "source": "rpm",
                    "version": "6.12.11"
                }
            ],
            "kernel-core": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-core",
                    "release": "200.fc41",
                    "source": "rpm",
                    "version": "6.12.11"
                }
            ],
            "kernel-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-devel",
                    "release": "200.fc41",
                    "source": "rpm",
                    "version": "6.12.11"
                }
            ],
            "kernel-headers": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-headers",
                    "release": "200.fc41",
                    "source": "rpm",
                    "version": "6.12.4"
                }
            ],
            "kernel-modules": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-modules",
                    "release": "200.fc41",
                    "source": "rpm",
                    "version": "6.12.11"
                }
            ],
            "kernel-modules-core": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kernel-modules-core",
                    "release": "200.fc41",
                    "source": "rpm",
                    "version": "6.12.11"
                }
            ],
            "keyutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "keyutils",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.6.3"
                }
            ],
            "keyutils-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "keyutils-libs",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.6.3"
                }
            ],
            "kmod": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kmod",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "33"
                }
            ],
            "kmod-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kmod-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "33"
                }
            ],
            "kpartx": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "kpartx",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "0.9.9"
                }
            ],
            "krb5-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "krb5-libs",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "1.21.3"
                }
            ],
            "langpacks-core-en": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "langpacks-core-en",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "4.1"
                }
            ],
            "langpacks-en": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "langpacks-en",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "4.1"
                }
            ],
            "langpacks-fonts-en": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "langpacks-fonts-en",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "4.1"
                }
            ],
            "less": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "less",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "668"
                }
            ],
            "libacl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libacl",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.3.2"
                }
            ],
            "libaio": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libaio",
                    "release": "20.fc41",
                    "source": "rpm",
                    "version": "0.3.111"
                }
            ],
            "libarchive": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libarchive",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "3.7.4"
                }
            ],
            "libassuan": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libassuan",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.5.7"
                }
            ],
            "libatasmart": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libatasmart",
                    "release": "29.fc41",
                    "source": "rpm",
                    "version": "0.19"
                }
            ],
            "libattr": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libattr",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "2.5.2"
                }
            ],
            "libb2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libb2",
                    "release": "12.fc41",
                    "source": "rpm",
                    "version": "0.98.1"
                }
            ],
            "libbasicobjects": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libbasicobjects",
                    "release": "57.fc41",
                    "source": "rpm",
                    "version": "0.1.1"
                }
            ],
            "libblkid": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblkid",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.40.4"
                }
            ],
            "libblockdev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-btrfs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-btrfs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-crypto": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-crypto",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-dm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-dm",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-fs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-fs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-loop": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-loop",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-lvm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-lvm",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-mdraid": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-mdraid",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-mpath": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-mpath",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-nvme": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-nvme",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-part": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-part",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-swap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-swap",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libblockdev-utils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libblockdev-utils",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "libbpf": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "libbpf",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.4.7"
                }
            ],
            "libbrotli": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libbrotli",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "1.1.0"
                }
            ],
            "libbytesize": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libbytesize",
                    "release": "99.fc41",
                    "source": "rpm",
                    "version": "2.11"
                }
            ],
            "libcap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcap",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "2.70"
                }
            ],
            "libcap-ng": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcap-ng",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "0.8.5"
                }
            ],
            "libcbor": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcbor",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.11.0"
                }
            ],
            "libcollection": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcollection",
                    "release": "57.fc41",
                    "source": "rpm",
                    "version": "0.7.0"
                }
            ],
            "libcom_err": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcom_err",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.47.1"
                }
            ],
            "libcomps": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcomps",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "0.1.21"
                }
            ],
            "libcurl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libcurl",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "8.9.1"
                }
            ],
            "libdhash": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libdhash",
                    "release": "57.fc41",
                    "source": "rpm",
                    "version": "0.5.0"
                }
            ],
            "libdnf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libdnf",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.73.4"
                }
            ],
            "libdnf5": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libdnf5",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "5.2.8.1"
                }
            ],
            "libdnf5-cli": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libdnf5-cli",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "5.2.8.1"
                }
            ],
            "libeconf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libeconf",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "0.6.2"
                }
            ],
            "libedit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libedit",
                    "release": "54.20250104cvs.fc41",
                    "source": "rpm",
                    "version": "3.1"
                }
            ],
            "libev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libev",
                    "release": "12.fc41",
                    "source": "rpm",
                    "version": "4.33"
                }
            ],
            "libevdev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libevdev",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.13.3"
                }
            ],
            "libevent": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libevent",
                    "release": "14.fc41",
                    "source": "rpm",
                    "version": "2.1.12"
                }
            ],
            "libfdisk": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libfdisk",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.40.4"
                }
            ],
            "libffi": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libffi",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "3.4.6"
                }
            ],
            "libfido2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libfido2",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.15.0"
                }
            ],
            "libfsverity": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libfsverity",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.6"
                }
            ],
            "libgcc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgcc",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "14.2.1"
                }
            ],
            "libgcrypt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgcrypt",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "1.11.0"
                }
            ],
            "libgomp": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgomp",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "14.2.1"
                }
            ],
            "libgpg-error": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgpg-error",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.50"
                }
            ],
            "libgudev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgudev",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "238"
                }
            ],
            "libgusb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libgusb",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.4.9"
                }
            ],
            "libidn2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libidn2",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.3.7"
                }
            ],
            "libini_config": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libini_config",
                    "release": "57.fc41",
                    "source": "rpm",
                    "version": "1.3.1"
                }
            ],
            "libjcat": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libjcat",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "0.2.2"
                }
            ],
            "libjose": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libjose",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "14"
                }
            ],
            "libkcapi": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libkcapi",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.5.0"
                }
            ],
            "libkcapi-hasher": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libkcapi-hasher",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.5.0"
                }
            ],
            "libkcapi-hmaccalc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libkcapi-hmaccalc",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.5.0"
                }
            ],
            "libksba": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libksba",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.6.7"
                }
            ],
            "libldb": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "libldb",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "4.21.3"
                }
            ],
            "libluksmeta": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libluksmeta",
                    "release": "23.fc41",
                    "source": "rpm",
                    "version": "9"
                }
            ],
            "libmaxminddb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmaxminddb",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.12.2"
                }
            ],
            "libmbim": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmbim",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.30.0"
                }
            ],
            "libmnl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmnl",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.0.5"
                }
            ],
            "libmodulemd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmodulemd",
                    "release": "14.fc41",
                    "source": "rpm",
                    "version": "2.15.0"
                }
            ],
            "libmount": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmount",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.40.4"
                }
            ],
            "libmpc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libmpc",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.3.1"
                }
            ],
            "libndp": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libndp",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.9"
                }
            ],
            "libnetfilter_conntrack": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnetfilter_conntrack",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.0.9"
                }
            ],
            "libnfnetlink": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnfnetlink",
                    "release": "28.fc41",
                    "source": "rpm",
                    "version": "1.0.1"
                }
            ],
            "libnfsidmap": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "libnfsidmap",
                    "release": "5.rc2.fc41",
                    "source": "rpm",
                    "version": "2.8.1"
                }
            ],
            "libnftnl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnftnl",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.2.7"
                }
            ],
            "libnghttp2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnghttp2",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.62.1"
                }
            ],
            "libnl3": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnl3",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.11.0"
                }
            ],
            "libnsl2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnsl2",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.0.1"
                }
            ],
            "libnvme": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libnvme",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.10"
                }
            ],
            "libpath_utils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpath_utils",
                    "release": "57.fc41",
                    "source": "rpm",
                    "version": "0.2.1"
                }
            ],
            "libpipeline": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpipeline",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.5.7"
                }
            ],
            "libpkgconf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpkgconf",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.3.0"
                }
            ],
            "libpsl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpsl",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "0.21.5"
                }
            ],
            "libpwquality": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libpwquality",
                    "release": "11.fc41",
                    "source": "rpm",
                    "version": "1.4.5"
                }
            ],
            "libqmi": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libqmi",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.34.0"
                }
            ],
            "libqrtr-glib": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libqrtr-glib",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.2.2"
                }
            ],
            "libref_array": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libref_array",
                    "release": "57.fc41",
                    "source": "rpm",
                    "version": "0.1.5"
                }
            ],
            "librepo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "librepo",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.19.0"
                }
            ],
            "libreport-filesystem": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "libreport-filesystem",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "2.17.15"
                }
            ],
            "libseccomp": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libseccomp",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.5.5"
                }
            ],
            "libselinux": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libselinux",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "libselinux-utils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libselinux-utils",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "libsemanage": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsemanage",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "libsepol": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsepol",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "libsmartcols": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsmartcols",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.40.4"
                }
            ],
            "libsodium": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsodium",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.0.20"
                }
            ],
            "libsolv": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsolv",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "0.7.31"
                }
            ],
            "libss": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libss",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.47.1"
                }
            ],
            "libssh": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libssh",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "0.10.6"
                }
            ],
            "libssh-config": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "libssh-config",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "0.10.6"
                }
            ],
            "libsss_certmap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsss_certmap",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.10.2"
                }
            ],
            "libsss_idmap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsss_idmap",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.10.2"
                }
            ],
            "libsss_nss_idmap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsss_nss_idmap",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.10.2"
                }
            ],
            "libsss_sudo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libsss_sudo",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.10.2"
                }
            ],
            "libstdc++": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libstdc++",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "14.2.1"
                }
            ],
            "libtalloc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtalloc",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "2.4.2"
                }
            ],
            "libtasn1": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtasn1",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "4.19.0"
                }
            ],
            "libtdb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtdb",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "1.4.12"
                }
            ],
            "libtevent": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtevent",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "0.16.1"
                }
            ],
            "libtextstyle": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtextstyle",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "0.22.5"
                }
            ],
            "libtirpc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtirpc",
                    "release": "1.rc3.fc41",
                    "source": "rpm",
                    "version": "1.3.6"
                }
            ],
            "libtool-ltdl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libtool-ltdl",
                    "release": "12.fc41",
                    "source": "rpm",
                    "version": "2.4.7"
                }
            ],
            "libudisks2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libudisks2",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "2.10.1"
                }
            ],
            "libunistring": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libunistring",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.1"
                }
            ],
            "libusb1": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libusb1",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.0.27"
                }
            ],
            "libutempter": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libutempter",
                    "release": "15.fc41",
                    "source": "rpm",
                    "version": "1.2.1"
                }
            ],
            "libuuid": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libuuid",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.40.4"
                }
            ],
            "libverto": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libverto",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "0.3.2"
                }
            ],
            "libverto-libev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libverto-libev",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "0.3.2"
                }
            ],
            "libxcrypt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxcrypt",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "4.4.38"
                }
            ],
            "libxcrypt-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxcrypt-devel",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "4.4.38"
                }
            ],
            "libxkbcommon": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxkbcommon",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.7.0"
                }
            ],
            "libxml2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxml2",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.12.9"
                }
            ],
            "libxmlb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxmlb",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "0.3.21"
                }
            ],
            "libxslt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libxslt",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.1.42"
                }
            ],
            "libyaml": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libyaml",
                    "release": "15.fc41",
                    "source": "rpm",
                    "version": "0.2.5"
                }
            ],
            "libzstd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libzstd",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.5.6"
                }
            ],
            "libzstd-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "libzstd-devel",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.5.6"
                }
            ],
            "linux-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "linux-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "linux-firmware-whence": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "linux-firmware-whence",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "lmdb-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lmdb-libs",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.9.33"
                }
            ],
            "lsof": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lsof",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "4.98.0"
                }
            ],
            "lua-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lua-libs",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "5.4.6"
                }
            ],
            "luksmeta": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "luksmeta",
                    "release": "23.fc41",
                    "source": "rpm",
                    "version": "9"
                }
            ],
            "lvm2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lvm2",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "2.03.25"
                }
            ],
            "lvm2-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lvm2-libs",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "2.03.25"
                }
            ],
            "lz4-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lz4-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.10.0"
                }
            ],
            "lzo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "lzo",
                    "release": "13.fc41",
                    "source": "rpm",
                    "version": "2.10"
                }
            ],
            "m4": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "m4",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "1.4.19"
                }
            ],
            "make": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "make",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "4.4.1"
                }
            ],
            "man-db": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "man-db",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.12.1"
                }
            ],
            "mdadm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "mdadm",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "4.3"
                }
            ],
            "mokutil": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "mokutil",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.7.1"
                }
            ],
            "mpdecimal": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "mpdecimal",
                    "release": "16.fc41",
                    "source": "rpm",
                    "version": "2.5.1"
                }
            ],
            "mpfr": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "mpfr",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "4.2.1"
                }
            ],
            "mt7xxx-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "mt7xxx-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "ncurses": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ncurses",
                    "release": "2.20240629.fc41",
                    "source": "rpm",
                    "version": "6.5"
                }
            ],
            "ncurses-base": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "ncurses-base",
                    "release": "2.20240629.fc41",
                    "source": "rpm",
                    "version": "6.5"
                }
            ],
            "ncurses-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "ncurses-libs",
                    "release": "2.20240629.fc41",
                    "source": "rpm",
                    "version": "6.5"
                }
            ],
            "nettle": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nettle",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "3.10"
                }
            ],
            "nfs-utils": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "nfs-utils",
                    "release": "5.rc2.fc41",
                    "source": "rpm",
                    "version": "2.8.1"
                }
            ],
            "nftables": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "nftables",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "1.0.9"
                }
            ],
            "nilfs-utils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nilfs-utils",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "2.2.11"
                }
            ],
            "npth": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "npth",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.7"
                }
            ],
            "nspr": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nspr",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "4.36.0"
                }
            ],
            "nss": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.107.0"
                }
            ],
            "nss-softokn": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss-softokn",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.107.0"
                }
            ],
            "nss-softokn-freebl": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss-softokn-freebl",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.107.0"
                }
            ],
            "nss-sysinit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss-sysinit",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.107.0"
                }
            ],
            "nss-util": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "nss-util",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.107.0"
                }
            ],
            "ntfs-3g": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "ntfs-3g",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "2022.10.3"
                }
            ],
            "ntfs-3g-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "ntfs-3g-libs",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "2022.10.3"
                }
            ],
            "ntfsprogs": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "ntfsprogs",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "2022.10.3"
                }
            ],
            "nvidia-gpu-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "nvidia-gpu-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "nxpwireless-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "nxpwireless-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "oniguruma": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "oniguruma",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "6.9.10"
                }
            ],
            "openldap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "openldap",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "2.6.8"
                }
            ],
            "openssh": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "openssh",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "9.9p1"
                }
            ],
            "openssh-clients": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "openssh-clients",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "9.9p1"
                }
            ],
            "openssh-server": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "openssh-server",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "9.9p1"
                }
            ],
            "openssl": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "openssl",
                    "release": "11.fc41",
                    "source": "rpm",
                    "version": "3.2.2"
                }
            ],
            "openssl-devel": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "openssl-devel",
                    "release": "11.fc41",
                    "source": "rpm",
                    "version": "3.2.2"
                }
            ],
            "openssl-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "openssl-libs",
                    "release": "11.fc41",
                    "source": "rpm",
                    "version": "3.2.2"
                }
            ],
            "openssl-pkcs11": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "openssl-pkcs11",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "0.4.12"
                }
            ],
            "os-prober": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "os-prober",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.81"
                }
            ],
            "p11-kit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "p11-kit",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "0.25.5"
                }
            ],
            "p11-kit-trust": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "p11-kit-trust",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "0.25.5"
                }
            ],
            "pam": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pam",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "1.6.1"
                }
            ],
            "pam-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pam-libs",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "1.6.1"
                }
            ],
            "parted": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "parted",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "3.6"
                }
            ],
            "passim-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "passim-libs",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.1.8"
                }
            ],
            "pcre2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pcre2",
                    "release": "1.fc41.1",
                    "source": "rpm",
                    "version": "10.44"
                }
            ],
            "pcre2-syntax": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "pcre2-syntax",
                    "release": "1.fc41.1",
                    "source": "rpm",
                    "version": "10.44"
                }
            ],
            "pcsc-lite": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pcsc-lite",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.3.0"
                }
            ],
            "pcsc-lite-ccid": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pcsc-lite-ccid",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "1.6.1"
                }
            ],
            "pcsc-lite-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pcsc-lite-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.3.0"
                }
            ],
            "perl-AutoLoader": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-AutoLoader",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "5.74"
                }
            ],
            "perl-B": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-B",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.89"
                }
            ],
            "perl-Carp": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Carp",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "1.54"
                }
            ],
            "perl-Class-Struct": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-Class-Struct",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "0.68"
                }
            ],
            "perl-Data-Dumper": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-Data-Dumper",
                    "release": "512.fc41",
                    "source": "rpm",
                    "version": "2.189"
                }
            ],
            "perl-Digest": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Digest",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "1.20"
                }
            ],
            "perl-Digest-MD5": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-Digest-MD5",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "2.59"
                }
            ],
            "perl-DynaLoader": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-DynaLoader",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.56"
                }
            ],
            "perl-Encode": [
                {
                    "arch": "x86_64",
                    "epoch": 4,
                    "name": "perl-Encode",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "3.21"
                }
            ],
            "perl-Errno": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-Errno",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.38"
                }
            ],
            "perl-Error": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-Error",
                    "release": "16.fc41",
                    "source": "rpm",
                    "version": "0.17029"
                }
            ],
            "perl-Exporter": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Exporter",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "5.78"
                }
            ],
            "perl-Fcntl": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-Fcntl",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.18"
                }
            ],
            "perl-File-Basename": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-File-Basename",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "2.86"
                }
            ],
            "perl-File-Find": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-File-Find",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.44"
                }
            ],
            "perl-File-Path": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-File-Path",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "2.18"
                }
            ],
            "perl-File-Temp": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-File-Temp",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "0.231.100"
                }
            ],
            "perl-File-stat": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-File-stat",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.14"
                }
            ],
            "perl-FileHandle": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-FileHandle",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "2.05"
                }
            ],
            "perl-Getopt-Long": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-Getopt-Long",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.58"
                }
            ],
            "perl-Getopt-Std": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-Getopt-Std",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.14"
                }
            ],
            "perl-Git": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Git",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.48.1"
                }
            ],
            "perl-HTTP-Tiny": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-HTTP-Tiny",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "0.090"
                }
            ],
            "perl-IO": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-IO",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.55"
                }
            ],
            "perl-IO-Socket-IP": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-IO-Socket-IP",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "0.43"
                }
            ],
            "perl-IO-Socket-SSL": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-IO-Socket-SSL",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.089"
                }
            ],
            "perl-IPC-Open3": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-IPC-Open3",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.22"
                }
            ],
            "perl-MIME-Base32": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-MIME-Base32",
                    "release": "21.fc41",
                    "source": "rpm",
                    "version": "1.303"
                }
            ],
            "perl-MIME-Base64": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-MIME-Base64",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "3.16"
                }
            ],
            "perl-NDBM_File": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-NDBM_File",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.17"
                }
            ],
            "perl-Net-SSLeay": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-Net-SSLeay",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "1.94"
                }
            ],
            "perl-POSIX": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-POSIX",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "2.20"
                }
            ],
            "perl-PathTools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-PathTools",
                    "release": "512.fc41",
                    "source": "rpm",
                    "version": "3.91"
                }
            ],
            "perl-Pod-Escapes": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-Pod-Escapes",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "1.07"
                }
            ],
            "perl-Pod-Perldoc": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Pod-Perldoc",
                    "release": "512.fc41",
                    "source": "rpm",
                    "version": "3.28.01"
                }
            ],
            "perl-Pod-Simple": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-Pod-Simple",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "3.45"
                }
            ],
            "perl-Pod-Usage": [
                {
                    "arch": "noarch",
                    "epoch": 4,
                    "name": "perl-Pod-Usage",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "2.03"
                }
            ],
            "perl-Scalar-List-Utils": [
                {
                    "arch": "x86_64",
                    "epoch": 5,
                    "name": "perl-Scalar-List-Utils",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.68"
                }
            ],
            "perl-SelectSaver": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-SelectSaver",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.02"
                }
            ],
            "perl-Socket": [
                {
                    "arch": "x86_64",
                    "epoch": 4,
                    "name": "perl-Socket",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "2.038"
                }
            ],
            "perl-Storable": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "perl-Storable",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "3.32"
                }
            ],
            "perl-Symbol": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-Symbol",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.09"
                }
            ],
            "perl-Term-ANSIColor": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Term-ANSIColor",
                    "release": "512.fc41",
                    "source": "rpm",
                    "version": "5.01"
                }
            ],
            "perl-Term-Cap": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Term-Cap",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "1.18"
                }
            ],
            "perl-TermReadKey": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "perl-TermReadKey",
                    "release": "23.fc41",
                    "source": "rpm",
                    "version": "2.38"
                }
            ],
            "perl-Text-ParseWords": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Text-ParseWords",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "3.31"
                }
            ],
            "perl-Text-Tabs+Wrap": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-Text-Tabs+Wrap",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "2024.001"
                }
            ],
            "perl-Time-Local": [
                {
                    "arch": "noarch",
                    "epoch": 2,
                    "name": "perl-Time-Local",
                    "release": "511.fc41",
                    "source": "rpm",
                    "version": "1.350"
                }
            ],
            "perl-URI": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-URI",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "5.30"
                }
            ],
            "perl-base": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-base",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "2.27"
                }
            ],
            "perl-constant": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-constant",
                    "release": "512.fc41",
                    "source": "rpm",
                    "version": "1.33"
                }
            ],
            "perl-if": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-if",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "0.61.000"
                }
            ],
            "perl-interpreter": [
                {
                    "arch": "x86_64",
                    "epoch": 4,
                    "name": "perl-interpreter",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "5.40.1"
                }
            ],
            "perl-lib": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-lib",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "0.65"
                }
            ],
            "perl-libnet": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "perl-libnet",
                    "release": "512.fc41",
                    "source": "rpm",
                    "version": "3.15"
                }
            ],
            "perl-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 4,
                    "name": "perl-libs",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "5.40.1"
                }
            ],
            "perl-locale": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-locale",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.12"
                }
            ],
            "perl-mro": [
                {
                    "arch": "x86_64",
                    "epoch": 0,
                    "name": "perl-mro",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.29"
                }
            ],
            "perl-overload": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-overload",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.37"
                }
            ],
            "perl-overloading": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-overloading",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "0.02"
                }
            ],
            "perl-parent": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-parent",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "0.242"
                }
            ],
            "perl-podlators": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "perl-podlators",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "6.0.2"
                }
            ],
            "perl-vars": [
                {
                    "arch": "noarch",
                    "epoch": 0,
                    "name": "perl-vars",
                    "release": "514.fc41",
                    "source": "rpm",
                    "version": "1.05"
                }
            ],
            "pkcs11-provider": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pkcs11-provider",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.6"
                }
            ],
            "pkgconf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pkgconf",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.3.0"
                }
            ],
            "pkgconf-m4": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "pkgconf-m4",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.3.0"
                }
            ],
            "pkgconf-pkg-config": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "pkgconf-pkg-config",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.3.0"
                }
            ],
            "plymouth": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "plymouth",
                    "release": "14.fc41",
                    "source": "rpm",
                    "version": "24.004.60"
                }
            ],
            "plymouth-core-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "plymouth-core-libs",
                    "release": "14.fc41",
                    "source": "rpm",
                    "version": "24.004.60"
                }
            ],
            "plymouth-scripts": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "plymouth-scripts",
                    "release": "14.fc41",
                    "source": "rpm",
                    "version": "24.004.60"
                }
            ],
            "policycoreutils": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "policycoreutils",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "polkit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "polkit",
                    "release": "1.fc41.1",
                    "source": "rpm",
                    "version": "125"
                }
            ],
            "polkit-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "polkit-libs",
                    "release": "1.fc41.1",
                    "source": "rpm",
                    "version": "125"
                }
            ],
            "popt": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "popt",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "1.19"
                }
            ],
            "procps-ng": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "procps-ng",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "4.0.4"
                }
            ],
            "protobuf-c": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "protobuf-c",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.5.0"
                }
            ],
            "psmisc": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "psmisc",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "23.7"
                }
            ],
            "publicsuffix-list-dafsa": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "publicsuffix-list-dafsa",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20250116"
                }
            ],
            "python-pip-wheel": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python-pip-wheel",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "24.2"
                }
            ],
            "python-unversioned-command": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python-unversioned-command",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "3.13.1"
                }
            ],
            "python3": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "3.13.1"
                }
            ],
            "python3-attrs": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-attrs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "24.2.0"
                }
            ],
            "python3-audit": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-audit",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.0.3"
                }
            ],
            "python3-blivet": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "python3-blivet",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "3.11.0"
                }
            ],
            "python3-blockdev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-blockdev",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.2.1"
                }
            ],
            "python3-bytesize": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-bytesize",
                    "release": "99.fc41",
                    "source": "rpm",
                    "version": "2.11"
                }
            ],
            "python3-charset-normalizer": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-charset-normalizer",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "3.3.2"
                }
            ],
            "python3-configobj": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-configobj",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "5.0.8"
                }
            ],
            "python3-configshell": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "python3-configshell",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.1.30"
                }
            ],
            "python3-dateutil": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "python3-dateutil",
                    "release": "16.fc41",
                    "source": "rpm",
                    "version": "2.8.2"
                }
            ],
            "python3-dbus": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-dbus",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.3.2"
                }
            ],
            "python3-dbus-client-gen": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dbus-client-gen",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "0.5.1"
                }
            ],
            "python3-dbus-python-client-gen": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dbus-python-client-gen",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.8.3"
                }
            ],
            "python3-dbus-signature-pyparsing": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dbus-signature-pyparsing",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "0.4.1"
                }
            ],
            "python3-distro": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-distro",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "1.9.0"
                }
            ],
            "python3-dnf": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dnf",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "4.22.0"
                }
            ],
            "python3-dnf-plugins-core": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-dnf-plugins-core",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.10.0"
                }
            ],
            "python3-firewall": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-firewall",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.2.3"
                }
            ],
            "python3-gobject-base": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-gobject-base",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "3.48.2"
                }
            ],
            "python3-hawkey": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-hawkey",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.73.4"
                }
            ],
            "python3-idna": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-idna",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "python3-into-dbus-python": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-into-dbus-python",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.8.2"
                }
            ],
            "python3-jinja2": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-jinja2",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.1.5"
                }
            ],
            "python3-jsonpatch": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-jsonpatch",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "1.33"
                }
            ],
            "python3-jsonpointer": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-jsonpointer",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "2.4"
                }
            ],
            "python3-jsonschema": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-jsonschema",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "4.19.1"
                }
            ],
            "python3-jsonschema-specifications": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-jsonschema-specifications",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "2023.11.2"
                }
            ],
            "python3-justbases": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-justbases",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "0.15.2"
                }
            ],
            "python3-justbytes": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-justbytes",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "0.15.2"
                }
            ],
            "python3-kmod": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-kmod",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "0.9.2"
                }
            ],
            "python3-libcomps": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libcomps",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "0.1.21"
                }
            ],
            "python3-libdnf": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libdnf",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "0.73.4"
                }
            ],
            "python3-libdnf5": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libdnf5",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "5.2.8.1"
                }
            ],
            "python3-libmount": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libmount",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.40.4"
                }
            ],
            "python3-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libs",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "3.13.1"
                }
            ],
            "python3-libselinux": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libselinux",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "python3-libsemanage": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-libsemanage",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "python3-lxml": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-lxml",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "5.2.1"
                }
            ],
            "python3-markupsafe": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-markupsafe",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "2.1.5"
                }
            ],
            "python3-nftables": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "python3-nftables",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "1.0.9"
                }
            ],
            "python3-oauthlib": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-oauthlib",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "3.2.2"
                }
            ],
            "python3-packaging": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-packaging",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "24.2"
                }
            ],
            "python3-policycoreutils": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-policycoreutils",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "3.7"
                }
            ],
            "python3-psutil": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-psutil",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "5.9.8"
                }
            ],
            "python3-pyparsing": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pyparsing",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "3.1.2"
                }
            ],
            "python3-pyparted": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "python3-pyparted",
                    "release": "7.fc41",
                    "source": "rpm",
                    "version": "3.13.0"
                }
            ],
            "python3-pyserial": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pyserial",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "3.5"
                }
            ],
            "python3-pysocks": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pysocks",
                    "release": "25.fc41",
                    "source": "rpm",
                    "version": "1.7.1"
                }
            ],
            "python3-pyudev": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-pyudev",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "0.24.3"
                }
            ],
            "python3-pyyaml": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-pyyaml",
                    "release": "18.fc41",
                    "source": "rpm",
                    "version": "6.0.1"
                }
            ],
            "python3-referencing": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-referencing",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "0.35.1"
                }
            ],
            "python3-requests": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-requests",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "2.32.3"
                }
            ],
            "python3-rpds-py": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-rpds-py",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "0.19.1"
                }
            ],
            "python3-rpm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-rpm",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.20.0"
                }
            ],
            "python3-rtslib": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-rtslib",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "2.1.76"
                }
            ],
            "python3-setools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-setools",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "4.5.1"
                }
            ],
            "python3-setuptools": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-setuptools",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "69.2.0"
                }
            ],
            "python3-six": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-six",
                    "release": "23.fc41",
                    "source": "rpm",
                    "version": "1.16.0"
                }
            ],
            "python3-systemd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "python3-systemd",
                    "release": "11.fc41",
                    "source": "rpm",
                    "version": "235"
                }
            ],
            "python3-typing-extensions": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-typing-extensions",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "4.12.2"
                }
            ],
            "python3-urllib3": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-urllib3",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.26.20"
                }
            ],
            "python3-urllib3+socks": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-urllib3+socks",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.26.20"
                }
            ],
            "python3-urwid": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-urwid",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.6.14"
                }
            ],
            "python3-wcwidth": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "python3-wcwidth",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "0.2.13"
                }
            ],
            "qa-tools": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "qa-tools",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "4.1"
                }
            ],
            "quota": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "quota",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "4.09"
                }
            ],
            "quota-nls": [
                {
                    "arch": "noarch",
                    "epoch": 1,
                    "name": "quota-nls",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "4.09"
                }
            ],
            "readline": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "readline",
                    "release": "10.fc41",
                    "source": "rpm",
                    "version": "8.2"
                }
            ],
            "realtek-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "realtek-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "restraint": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "restraint",
                    "release": "1.fc41eng",
                    "source": "rpm",
                    "version": "0.4.5"
                }
            ],
            "restraint-rhts": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "restraint-rhts",
                    "release": "1.fc41eng",
                    "source": "rpm",
                    "version": "0.4.5"
                }
            ],
            "rng-tools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rng-tools",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "6.17"
                }
            ],
            "rootfiles": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "rootfiles",
                    "release": "37.fc41",
                    "source": "rpm",
                    "version": "8.1"
                }
            ],
            "rpcbind": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpcbind",
                    "release": "1.rc1.fc41",
                    "source": "rpm",
                    "version": "1.2.7"
                }
            ],
            "rpm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.20.0"
                }
            ],
            "rpm-build-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-build-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.20.0"
                }
            ],
            "rpm-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.20.0"
                }
            ],
            "rpm-plugin-selinux": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-plugin-selinux",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.20.0"
                }
            ],
            "rpm-sequoia": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-sequoia",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "1.7.0"
                }
            ],
            "rpm-sign-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rpm-sign-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "4.20.0"
                }
            ],
            "rsync": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rsync",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.4.1"
                }
            ],
            "rtl-sdr": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "rtl-sdr",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.0.1"
                }
            ],
            "sdbus-cpp": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sdbus-cpp",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "1.5.0"
                }
            ],
            "sed": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sed",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "4.9"
                }
            ],
            "selinux-policy": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "selinux-policy",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "41.31"
                }
            ],
            "selinux-policy-targeted": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "selinux-policy-targeted",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "41.31"
                }
            ],
            "setup": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "setup",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "2.15.0"
                }
            ],
            "shadow-utils": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "shadow-utils",
                    "release": "12.fc41",
                    "source": "rpm",
                    "version": "4.15.1"
                }
            ],
            "shared-mime-info": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "shared-mime-info",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "2.3"
                }
            ],
            "sqlite-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sqlite-libs",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "3.46.1"
                }
            ],
            "sssd-client": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sssd-client",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.10.2"
                }
            ],
            "sssd-common": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sssd-common",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.10.2"
                }
            ],
            "sssd-kcm": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sssd-kcm",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.10.2"
                }
            ],
            "sssd-krb5-common": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sssd-krb5-common",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.10.2"
                }
            ],
            "sssd-nfs-idmap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sssd-nfs-idmap",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.10.2"
                }
            ],
            "strace": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "strace",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "6.13"
                }
            ],
            "stratis-cli": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "stratis-cli",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.7.0"
                }
            ],
            "stratisd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "stratisd",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "3.7.3"
                }
            ],
            "sudo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "sudo",
                    "release": "5.p5.fc41",
                    "source": "rpm",
                    "version": "1.9.15"
                }
            ],
            "systemd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "256.11"
                }
            ],
            "systemd-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "256.11"
                }
            ],
            "systemd-networkd": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd-networkd",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "256.11"
                }
            ],
            "systemd-pam": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd-pam",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "256.11"
                }
            ],
            "systemd-resolved": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd-resolved",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "256.11"
                }
            ],
            "systemd-udev": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemd-udev",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "256.11"
                }
            ],
            "systemtap": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemtap",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "5.2"
                }
            ],
            "systemtap-client": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemtap-client",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "5.2"
                }
            ],
            "systemtap-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemtap-devel",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "5.2"
                }
            ],
            "systemtap-runtime": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "systemtap-runtime",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "5.2"
                }
            ],
            "tar": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "tar",
                    "release": "4.fc41",
                    "source": "rpm",
                    "version": "1.35"
                }
            ],
            "target-restore": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "target-restore",
                    "release": "9.fc41",
                    "source": "rpm",
                    "version": "2.1.76"
                }
            ],
            "targetcli": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "targetcli",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "2.1.58"
                }
            ],
            "tbb": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "tbb",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2021.13.0"
                }
            ],
            "time": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "time",
                    "release": "24.fc41",
                    "source": "rpm",
                    "version": "1.9"
                }
            ],
            "tiwilink-firmware": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "tiwilink-firmware",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "20241210"
                }
            ],
            "tpm2-tools": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "tpm2-tools",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "5.7"
                }
            ],
            "tpm2-tss": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "tpm2-tss",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "4.1.3"
                }
            ],
            "tpm2-tss-fapi": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "tpm2-tss-fapi",
                    "release": "3.fc41",
                    "source": "rpm",
                    "version": "4.1.3"
                }
            ],
            "tzdata": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "tzdata",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2024b"
                }
            ],
            "udisks2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "udisks2",
                    "release": "6.fc41",
                    "source": "rpm",
                    "version": "2.10.1"
                }
            ],
            "unbound-anchor": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "unbound-anchor",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.22.0"
                }
            ],
            "unbound-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "unbound-libs",
                    "release": "8.fc41",
                    "source": "rpm",
                    "version": "1.22.0"
                }
            ],
            "unzip": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "unzip",
                    "release": "64.fc41",
                    "source": "rpm",
                    "version": "6.0"
                }
            ],
            "userspace-rcu": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "userspace-rcu",
                    "release": "5.fc41",
                    "source": "rpm",
                    "version": "0.14.0"
                }
            ],
            "util-linux": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "util-linux",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.40.4"
                }
            ],
            "util-linux-core": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "util-linux-core",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.40.4"
                }
            ],
            "vdo": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "vdo",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "8.3.0.73"
                }
            ],
            "vim-common": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "vim-common",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "9.1.1000"
                }
            ],
            "vim-data": [
                {
                    "arch": "noarch",
                    "epoch": 2,
                    "name": "vim-data",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "9.1.1000"
                }
            ],
            "vim-default-editor": [
                {
                    "arch": "noarch",
                    "epoch": 2,
                    "name": "vim-default-editor",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "9.1.1000"
                }
            ],
            "vim-enhanced": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "vim-enhanced",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "9.1.1000"
                }
            ],
            "vim-filesystem": [
                {
                    "arch": "noarch",
                    "epoch": 2,
                    "name": "vim-filesystem",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "9.1.1000"
                }
            ],
            "vim-minimal": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "vim-minimal",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "9.1.1000"
                }
            ],
            "volume_key-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "volume_key-libs",
                    "release": "23.fc41",
                    "source": "rpm",
                    "version": "0.3.12"
                }
            ],
            "wget2": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "wget2",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.2.0"
                }
            ],
            "wget2-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "wget2-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.2.0"
                }
            ],
            "wget2-wget": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "wget2-wget",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.2.0"
                }
            ],
            "which": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "which",
                    "release": "42.fc41",
                    "source": "rpm",
                    "version": "2.21"
                }
            ],
            "xfsprogs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "xfsprogs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "6.9.0"
                }
            ],
            "xkeyboard-config": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "xkeyboard-config",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "2.42"
                }
            ],
            "xxd": [
                {
                    "arch": "x86_64",
                    "epoch": 2,
                    "name": "xxd",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "9.1.1000"
                }
            ],
            "xxhash-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "xxhash-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "0.8.3"
                }
            ],
            "xz": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "xz",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "5.6.2"
                }
            ],
            "xz-devel": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "xz-devel",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "5.6.2"
                }
            ],
            "xz-libs": [
                {
                    "arch": "x86_64",
                    "epoch": 1,
                    "name": "xz-libs",
                    "release": "2.fc41",
                    "source": "rpm",
                    "version": "5.6.2"
                }
            ],
            "zchunk-libs": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "zchunk-libs",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "1.5.1"
                }
            ],
            "zip": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "zip",
                    "release": "41.fc41",
                    "source": "rpm",
                    "version": "3.0"
                }
            ],
            "zlib-ng-compat": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "zlib-ng-compat",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.2.3"
                }
            ],
            "zlib-ng-compat-devel": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "zlib-ng-compat-devel",
                    "release": "1.fc41",
                    "source": "rpm",
                    "version": "2.2.3"
                }
            ],
            "zram-generator": [
                {
                    "arch": "x86_64",
                    "epoch": null,
                    "name": "zram-generator",
                    "release": "12.fc41",
                    "source": "rpm",
                    "version": "1.1.2"
                }
            ],
            "zram-generator-defaults": [
                {
                    "arch": "noarch",
                    "epoch": null,
                    "name": "zram-generator-defaults",
                    "release": "12.fc41",
                    "source": "rpm",
                    "version": "1.1.2"
                }
            ]
        }
    },
    "changed": false
}

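The mapping above — each key a package name, each value a list of {arch, epoch, name, release, source, version} entries under ansible_facts.packages — is the documented return shape of ansible.builtin.package_facts. A minimal task that gathers the same inventory (a sketch; the test's actual task may differ in detail):

    - name: Gather installed-package facts
      ansible.builtin.package_facts:
        manager: auto   # rpm is auto-detected on Fedora; results land in ansible_facts.packages
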
TASK [Set blivet package name] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:28
Saturday 08 February 2025  18:30:59 -0500 (0:00:01.504)       0:00:14.062 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "blivet_pkg_name": [
            "python3-blivet"
        ]
    },
    "changed": false
}

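A plausible reconstruction of the fact-setting task at tests_stratis.yml:28, deriving the name from the package facts gathered above (the regex and the derivation approach are assumptions; the real task may simply branch on distribution):

    - name: Set blivet package name
      ansible.builtin.set_fact:
        # keep whichever Python binding of blivet is installed
        # (python-blivet on older EL, python3-blivet elsewhere)
        blivet_pkg_name: "{{ ansible_facts.packages | select('match', '^python3?-blivet$') | list }}"
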
TASK [Set blivet package version] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:32
Saturday 08 February 2025  18:30:59 -0500 (0:00:00.103)       0:00:14.166 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "blivet_pkg_version": "3.11.0-3.fc41"
    },
    "changed": false
}

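The value 3.11.0-3.fc41 is the package's version and release joined with a dash; a hypothetical reconstruction using the facts set above:

    - name: Set blivet package version
      ansible.builtin.set_fact:
        # ansible_facts.packages['python3-blivet'][0] carries
        # 'version': '3.11.0' and 'release': '3.fc41' per the facts above
        blivet_pkg_version: "{{ ansible_facts.packages[blivet_pkg_name[0]][0].version ~ '-' ~ ansible_facts.packages[blivet_pkg_name[0]][0].release }}"
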
TASK [Set distribution version] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:36
Saturday 08 February 2025  18:31:00 -0500 (0:00:00.104)       0:00:14.270 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "is_fedora": true,
        "is_rhel10": false,
        "is_rhel78": false,
        "is_rhel9": false
    },
    "changed": false
}

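The four booleans follow directly from the distribution facts (here distribution == 'Fedora', so only is_fedora is true); a sketch of the conditional logic, with the exact RHEL version tests being assumptions:

    - name: Set distribution version
      ansible.builtin.set_fact:
        is_fedora: "{{ ansible_facts.distribution == 'Fedora' }}"
        is_rhel78: "{{ ansible_facts.distribution == 'RedHat' and ansible_facts.distribution_major_version in ['7', '8'] }}"
        is_rhel9: "{{ ansible_facts.distribution == 'RedHat' and ansible_facts.distribution_major_version == '9' }}"
        is_rhel10: "{{ ansible_facts.distribution == 'RedHat' and ansible_facts.distribution_major_version == '10' }}"
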
TASK [Get unused disks] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:47
Saturday 08 February 2025  18:31:00 -0500 (0:00:00.101)       0:00:14.372 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node3

TASK [Ensure test packages] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2
Saturday 08 February 2025  18:31:00 -0500 (0:00:00.105)       0:00:14.477 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: util-linux-core

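The "lsrpackages: util-linux-core" marker shows the role's package wrapper ensuring util-linux-core is present (it provides lsblk, used by the disk scan below); the equivalent plain task would be roughly:

    - name: Ensure test packages
      ansible.builtin.package:
        name: util-linux-core
        state: present
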
TASK [Find unused disks in the system] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11
Saturday 08 February 2025  18:31:01 -0500 (0:00:01.471)       0:00:15.948 ***** 
ok: [managed-node3] => {
    "changed": false,
    "disks": [
        "sda",
        "sdb",
        "sdc",
        "sdd",
        "sde",
        "sdf",
        "sdg",
        "sdh",
        "sdi"
    ],
    "info": [
        "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"ext4\" LOG-SEC=\"512\"",
        "Line type [part] is not disk: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"ext4\" LOG-SEC=\"512\"",
        "Line: NAME=\"/dev/zram0\" TYPE=\"disk\" SIZE=\"3893362688\" FSTYPE=\"\" LOG-SEC=\"4096\"",
        "filename [xvda2] is a partition",
        "filename [xvda1] is a partition",
        "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions",
        "Disk [/dev/zram0] attrs [{'type': 'disk', 'size': '3893362688', 'fstype': '', 'ssize': '4096'}] size is less than requested"
    ]
}

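The quoted 'Line: NAME="..."' entries are lsblk KEY="value" pairs, so the scan input can be reproduced by hand with something like the following (an illustration of what the helper parses, not its verbatim invocation):

    - name: Reproduce the disk-scan input (illustrative only)
      ansible.builtin.command: lsblk -b -p -P -o NAME,TYPE,SIZE,FSTYPE,LOG-SEC
      register: lsblk_out
      changed_when: false   # read-only query
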
TASK [Debug why there are no unused disks] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20
Saturday 08 February 2025  18:31:02 -0500 (0:00:00.568)       0:00:16.516 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'Unable to find unused disk' in unused_disks_return.disks",
    "skip_reason": "Conditional result was False"
}

TASK [Set unused_disks if necessary] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29
Saturday 08 February 2025  18:31:02 -0500 (0:00:00.034)       0:00:16.550 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "unused_disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ]
    },
    "changed": false
}

TASK [Exit playbook when there aren't enough unused disks in the system] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34
Saturday 08 February 2025  18:31:02 -0500 (0:00:00.039)       0:00:16.590 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)",
    "skip_reason": "Conditional result was False"
}

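The false_condition above is the complete guard; spelled out as a task it would look like this (the when expression is taken verbatim from the log, the fail message is an assumption):

    - name: Exit playbook when there aren't enough unused disks in the system
      ansible.builtin.fail:
        msg: Not enough unused disks available for this test
      when: unused_disks | d([]) | length < disks_needed | d(1)
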
TASK [Print unused disks] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39
Saturday 08 February 2025  18:31:02 -0500 (0:00:00.057)       0:00:16.647 ***** 
ok: [managed-node3] => {
    "unused_disks": [
        "sda",
        "sdb",
        "sdc",
        "sdd",
        "sde",
        "sdf",
        "sdg",
        "sdh",
        "sdi"
    ]
}

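The next task's return (name, state, plus a full dump of systemd unit properties) matches the service/systemd modules; a minimal equivalent task (a sketch, assuming no extra options):

    - name: Start stratisd service
      ansible.builtin.service:
        name: stratisd
        state: started
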
TASK [Start stratisd service] **************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:55
Saturday 08 February 2025  18:31:02 -0500 (0:00:00.038)       0:00:16.686 ***** 
changed: [managed-node3] => {
    "changed": true,
    "name": "stratisd",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "local-fs.target systemd-journald.socket system.slice dbus.socket",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.storage.stratis3",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "no",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "Stratis daemon",
        "DevicePolicy": "auto",
        "Documentation": "\"man:stratisd(8)\"",
        "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3893915648",
        "EffectiveMemoryMax": "3893915648",
        "EffectiveTasksMax": "4417",
        "Environment": "RUST_BACKTRACE=1 SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecStart": "{ path=/usr/libexec/stratisd ; argv[]=/usr/libexec/stratisd --log-level debug ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/libexec/stratisd ; argv[]=/usr/libexec/stratisd --log-level debug ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/stratisd.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "stratisd.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "process",
        "KillSignal": "2",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "14724",
        "LimitNPROCSoft": "14724",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "14724",
        "LimitSIGPENDINGSoft": "14724",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3381026816",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "stratisd.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "system.slice dbus.socket",
        "Restart": "on-abort",
        "RestartKillSignal": "2",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestamp": "Sat 2025-02-08 17:24:28 EST",
        "StateChangeTimestampMonotonic": "314168903",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "4417",
        "TimeoutAbortUSec": "45s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "45s",
        "TimeoutStopFailureMode": "abort",
        "TimeoutStopUSec": "45s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "enabled",
        "UtmpMode": "init",
        "WantedBy": "multi-user.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}
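
The return values above (name, state, plus the full dump of systemd unit properties) are typical of the service-management modules; the task at tests_stratis.yml:55 presumably amounts to something like the sketch below, though whether it uses ansible.builtin.systemd or ansible.builtin.service is an assumption:

    - name: Start stratisd service
      ansible.builtin.systemd:
        name: stratisd
        state: started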

TASK [Create one Stratis pool with one volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:60
Saturday 08 February 2025  18:31:03 -0500 (0:00:01.139)       0:00:17.826 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:31:03 -0500 (0:00:00.196)       0:00:18.023 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:31:03 -0500 (0:00:00.144)       0:00:18.168 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.160)       0:00:18.328 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
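
The loop items (RedHat.yml, Fedora.yml, then Fedora_41.yml twice) together with the `__vars_file is file` condition point at the usual system-roles pattern of layering OS-specific vars files from generic to specific; Fedora_41.yml appears twice because both the full and the major distribution version render to "41" on this host. A rough sketch of that pattern, where the templated loop items and the role_path prefix are assumptions:

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml
        - "{{ ansible_distribution }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_version }}.yml"
        - "{{ ansible_distribution }}_{{ ansible_distribution_major_version }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file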

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.101)       0:00:18.430 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.045)       0:00:18.476 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.046)       0:00:18.522 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.049)       0:00:18.571 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.056)       0:00:18.628 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.129)       0:00:18.758 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.069)       0:00:18.828 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
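
The storage_pools value printed here is the specification passed in by the calling task at tests_stratis.yml:60, so that invocation presumably looks roughly like the sketch below; the exact variable plumbing in the real test file is an assumption:

    - name: Create one Stratis pool with one volume
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: "{{ unused_disks }}"
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1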

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.068)       0:00:18.896 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.065)       0:00:18.961 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.064)       0:00:19.026 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.072)       0:00:19.099 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:31:04 -0500 (0:00:00.069)       0:00:19.168 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:31:05 -0500 (0:00:00.064)       0:00:19.233 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:31:05 -0500 (0:00:00.120)       0:00:19.353 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:31:05 -0500 (0:00:00.067)       0:00:19.420 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sdi",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdh",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdg",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdf",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sde",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdd",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdc",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo/test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/stratis/foo/test1",
            "fs_type": "stratis xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0",
        "/dev/stratis/foo/test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
            "state": "mounted"
        }
    ],
    "packages": [
        "e2fsprogs",
        "xfsprogs",
        "stratis-cli",
        "stratisd"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
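
Reading the action list in order: each of the nine disks is first formatted as a Stratis blockdev, the pool device /dev/stratis/foo is then assembled, and finally the test1 filesystem is created and formatted ("stratis xfs" reflecting that a Stratis filesystem is XFS on a thin device). The role drives all of this through blivet, but a hypothetical follow-up check could confirm the layout with stratis-cli; the task below, including the stratis_fs_list register name, is illustrative only and not part of the test:

    - name: Inspect the resulting Stratis layout (hypothetical, not in the test)
      ansible.builtin.command: stratis filesystem list foo
      register: stratis_fs_list
      changed_when: false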

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:31:16 -0500 (0:00:11.376)       0:00:30.797 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:31:16 -0500 (0:00:00.081)       0:00:30.878 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057334.8215806,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "58c9cf35b6a5bb13136caa97ec2cf1f888ff31f6",
        "ctime": 1739057334.8205807,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057334.8205807,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:31:17 -0500 (0:00:00.571)       0:00:31.450 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:31:17 -0500 (0:00:00.689)       0:00:32.139 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:31:18 -0500 (0:00:00.057)       0:00:32.197 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sdi",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdh",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdg",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdf",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sde",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdd",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdc",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo/test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/stratis/foo/test1",
                "fs_type": "stratis xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0",
            "/dev/stratis/foo/test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                "state": "mounted"
            }
        ],
        "packages": [
            "e2fsprogs",
            "xfsprogs",
            "stratis-cli",
            "stratisd"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:31:18 -0500 (0:00:00.074)       0:00:32.271 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:31:18 -0500 (0:00:00.070)       0:00:32.342 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:31:18 -0500 (0:00:00.072)       0:00:32.415 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:31:18 -0500 (0:00:00.085)       0:00:32.500 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
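
A "changed": false result with a null name and empty status is what a bare daemon-reload returns, which fits the task title; a sketch of the likely task, with the module choice assumed:

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd:
        daemon_reload: true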

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:31:19 -0500 (0:00:00.948)       0:00:33.449 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863"
}
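
The mount_info loop variable and the module redirect to ansible.posix.mount pin down the shape of this task fairly well. A sketch, assuming the role iterates blivet_output.mounts and maps the fields one-to-one (the exact parameter mapping is an assumption):

    - name: Set up new/current mounts
      ansible.posix.mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        opts: "{{ mount_info['opts'] }}"
        state: "{{ mount_info['state'] }}"
      loop: "{{ blivet_output.mounts }}"
      loop_control:
        loop_var: mount_info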

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:31:20 -0500 (0:00:00.816)       0:00:34.265 ***** 
skipping: [managed-node3] => (item={'src': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:31:20 -0500 (0:00:00.157)       0:00:34.423 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:31:21 -0500 (0:00:00.929)       0:00:35.352 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:31:21 -0500 (0:00:00.435)       0:00:35.787 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:31:21 -0500 (0:00:00.033)       0:00:35.821 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:73
Saturday 08 February 2025  18:31:22 -0500 (0:00:01.053)       0:00:36.874 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:31:22 -0500 (0:00:00.127)       0:00:37.001 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:31:22 -0500 (0:00:00.122)       0:00:37.124 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:31:23 -0500 (0:00:00.096)       0:00:37.221 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "50680d0c-03bd-4f76-925a-a05d5f71b4ae"
        },
        "/dev/sdb": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "c7c8e77b-f93a-413a-86c4-57dc1a5b456a"
        },
        "/dev/sdc": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "bcfba5ec-d005-4f6f-a370-10f104355b62"
        },
        "/dev/sdd": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "08ba2c2c-855e-451c-8337-a4327e64d1c6"
        },
        "/dev/sde": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "72263662-d2ea-4fd6-92b6-cad6f0d0acff"
        },
        "/dev/sdf": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "1bbf6eaa-71ab-4e8b-a161-926902dc29f2"
        },
        "/dev/sdg": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "1457869e-7e69-45df-9adf-362ef4b5c376"
        },
        "/dev/sdh": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "9fba51fb-d433-4b5d-83e5-5b31a2f888af"
        },
        "/dev/sdi": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "c7af15fa-8905-4e5f-b629-64fc8b26a590"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "2b39409e-cf08-47bd-ab00-2dbbae942863"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
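
The info mapping above (name, fstype, label, mountpoint, size, type, uuid per device) is the kind of inventory lsblk can produce; a hypothetical equivalent of this collection step is sketched below, assuming lsblk rather than whatever helper the test actually invokes, and with the volume_info register name made up for illustration:

    - name: Collect info about the volumes (hypothetical lsblk-based equivalent)
      ansible.builtin.command: lsblk --json -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: volume_info
      changed_when: false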

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:31:23 -0500 (0:00:00.701)       0:00:37.922 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003616",
    "end": "2025-02-08 18:31:24.268017",
    "rc": 0,
    "start": "2025-02-08 18:31:24.264401"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=2b39409e-cf08-47bd-ab00-2dbbae942863 /opt/test1 xfs defaults 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:31:24 -0500 (0:00:00.633)       0:00:38.556 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003275",
    "end": "2025-02-08 18:31:24.823816",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:31:24.820541"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:31:24 -0500 (0:00:00.518)       0:00:39.074 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:31:24 -0500 (0:00:00.086)       0:00:39.161 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.047)       0:00:39.208 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.053)       0:00:39.261 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.043)       0:00:39.305 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)
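
Both includes above come from a single loop over _storage_pool_tests; the loop items print as (item=members) and (item=volumes), so the dispatching task plausibly uses the default loop variable:

    - name: Verify pool subset (plausible shape of the include loop)
      ansible.builtin.include_tasks: "test-verify-pool-{{ item }}.yml"
      loop: "{{ _storage_pool_tests }}"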

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.117)       0:00:39.422 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.040)       0:00:39.462 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.036)       0:00:39.499 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.034)       0:00:39.533 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.039)       0:00:39.572 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.036)       0:00:39.608 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.036)       0:00:39.645 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.042)       0:00:39.687 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.055)       0:00:39.743 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:31:25 -0500 (0:00:00.041)       0:00:39.785 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}
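
This probe ran on the managed node and recorded rc 0 with failed_when disabled, so a lack of grow-to-fill support would merely skip the follow-up verification rather than fail the play. The actual probe is not visible in this log; one plausible sketch, assuming a hasattr check against blivet's LVMPhysicalVolume class:

    - name: Probe blivet for PV grow-to-fill support (illustrative; exact probe assumed)
      ansible.builtin.command:
        cmd: >-
          python3 -c "import sys;
          from blivet.formats.lvmpv import LVMPhysicalVolume;
          sys.exit(0 if hasattr(LVMPhysicalVolume, 'grow_to_fill') else 1)"
      register: storage_test_grow_supported   # register name assumed
      changed_when: false
      failed_when: false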

TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.472)       0:00:40.257 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.052)       0:00:40.310 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.087)       0:00:40.398 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.047)       0:00:40.445 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.046)       0:00:40.492 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.038)       0:00:40.531 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.036)       0:00:40.567 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.036)       0:00:40.604 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.040)       0:00:40.644 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.069)       0:00:40.713 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.062)       0:00:40.776 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.063)       0:00:40.839 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.059)       0:00:40.898 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.090)       0:00:40.989 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.085)       0:00:41.075 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped
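
The per-volume validation loops over storage_test_pool.volumes with a named loop variable and an LVM-only guard, which is why every item is skipped for this Stratis pool. A reconstruction of the loop shape (whether a per-volume file is included, and its name, are assumptions):

    - name: Validate pool member LVM RAID settings (reconstructed loop shape)
      ansible.builtin.include_tasks: verify-pool-member-lvmraid.yml   # file name assumed
      loop: "{{ storage_test_pool.volumes }}"
      loop_control:
        loop_var: storage_test_lvmraid_volume
      when: storage_test_pool.type == 'lvm'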

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:31:26 -0500 (0:00:00.042)       0:00:41.118 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.074)       0:00:41.193 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.043)       0:00:41.237 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.113)       0:00:41.350 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}
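
The expected crypttab entry count is 0 because neither the pool nor its volume is encrypted. An illustrative count check against the earlier crypttab read (same assumed register name as in the sketch above):

    - name: Validate pool member crypttab entries (illustrative count check)
      ansible.builtin.assert:
        that:
          - storage_test_crypttab.stdout_lines | length == (_storage_test_expected_crypttab_entries | int)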

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.091)       0:00:41.442 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.078)       0:00:41.521 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.070)       0:00:41.591 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.071)       0:00:41.662 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.097)       0:00:41.760 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.047)       0:00:41.807 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:31:27 -0500 (0:00:00.091)       0:00:41.899 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.469536",
    "end": "2025-02-08 18:31:28.544166",
    "rc": 0,
    "start": "2025-02-08 18:31:28.074630"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "50680d0c-03bd-4f76-925a-a05d5f71b4ae"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdb",
                        "size": "20971520 sectors",
                        "uuid": "c7c8e77b-f93a-413a-86c4-57dc1a5b456a"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdc",
                        "size": "20971520 sectors",
                        "uuid": "bcfba5ec-d005-4f6f-a370-10f104355b62"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdd",
                        "size": "2147483648 sectors",
                        "uuid": "08ba2c2c-855e-451c-8337-a4327e64d1c6"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sde",
                        "size": "2147483648 sectors",
                        "uuid": "72263662-d2ea-4fd6-92b6-cad6f0d0acff"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdf",
                        "size": "20971520 sectors",
                        "uuid": "1bbf6eaa-71ab-4e8b-a161-926902dc29f2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdg",
                        "size": "2147483648 sectors",
                        "uuid": "1457869e-7e69-45df-9adf-362ef4b5c376"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdh",
                        "size": "20971520 sectors",
                        "uuid": "9fba51fb-d433-4b5d-83e5-5b31a2f888af"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdi",
                        "size": "20971520 sectors",
                        "uuid": "c7af15fa-8905-4e5f-b629-64fc8b26a590"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "2b39409e-cf08-47bd-ab00-2dbbae942863"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "16b10e73-7c28-4b2a-84b9-c2d615bf1929"
        }
    ],
    "stopped_pools": []
}
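
The JSON above is the raw stdout of stratis report. The next task turns it into the _stratis_pool_info fact; given the storage_test_stratis_report variable that is reset later in this run, the parse step plausibly looks like:

    - name: Get information about Stratis (plausible parse of the report)
      ansible.builtin.set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"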

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:31:28 -0500 (0:00:00.935)       0:00:42.834 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "50680d0c-03bd-4f76-925a-a05d5f71b4ae"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdb",
                                "size": "20971520 sectors",
                                "uuid": "c7c8e77b-f93a-413a-86c4-57dc1a5b456a"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdc",
                                "size": "20971520 sectors",
                                "uuid": "bcfba5ec-d005-4f6f-a370-10f104355b62"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdd",
                                "size": "2147483648 sectors",
                                "uuid": "08ba2c2c-855e-451c-8337-a4327e64d1c6"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sde",
                                "size": "2147483648 sectors",
                                "uuid": "72263662-d2ea-4fd6-92b6-cad6f0d0acff"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdf",
                                "size": "20971520 sectors",
                                "uuid": "1bbf6eaa-71ab-4e8b-a161-926902dc29f2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdg",
                                "size": "2147483648 sectors",
                                "uuid": "1457869e-7e69-45df-9adf-362ef4b5c376"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdh",
                                "size": "20971520 sectors",
                                "uuid": "9fba51fb-d433-4b5d-83e5-5b31a2f888af"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdi",
                                "size": "20971520 sectors",
                                "uuid": "c7af15fa-8905-4e5f-b629-64fc8b26a590"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "2b39409e-cf08-47bd-ab00-2dbbae942863"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "16b10e73-7c28-4b2a-84b9-c2d615bf1929"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:31:28 -0500 (0:00:00.092)       0:00:42.927 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
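
With _stratis_pool_info populated, the pool-existence assertion amounts to a name lookup in the pools list. An illustrative equivalent:

    - name: Verify that the pool was created (illustrative assertion)
      ansible.builtin.assert:
        that:
          - _stratis_pool_info.pools | selectattr('name', 'equalto', storage_test_pool.name) | list | length == 1
        fail_msg: "Stratis pool '{{ storage_test_pool.name }}' missing from stratis report"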

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:31:28 -0500 (0:00:00.146)       0:00:43.074 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:31:28 -0500 (0:00:00.083)       0:00:43.158 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:31:29 -0500 (0:00:00.074)       0:00:43.232 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:31:29 -0500 (0:00:00.083)       0:00:43.315 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:31:29 -0500 (0:00:00.076)       0:00:43.392 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:31:29 -0500 (0:00:00.122)       0:00:43.515 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:31:29 -0500 (0:00:00.120)       0:00:43.635 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
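
Each entry in _storage_volume_tests maps to a test-verify-volume-<subset>.yml file, and the task title template shows the loop variable is storage_test_volume_subset, so the dispatch plausibly looks like:

    - name: Run test verify for each volume subset (reconstructed dispatch)
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset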

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:31:29 -0500 (0:00:00.400)       0:00:44.036 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:31:29 -0500 (0:00:00.115)       0:00:44.151 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.124)       0:00:44.276 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.070)       0:00:44.346 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.072)       0:00:44.419 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.072)       0:00:44.491 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.067)       0:00:44.559 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.066)       0:00:44.625 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.066)       0:00:44.692 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.074)       0:00:44.767 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.069)       0:00:44.836 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.064)       0:00:44.901 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
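
The match lists are consistent with regex searches over the fstab content for the volume's mount id and mount point (note the trailing spaces in the matched strings). One plausible derivation, assuming the fstab read was registered as storage_test_fstab; the mount-options matches would be derived the same way, and a hardened version would pass the values through regex_escape first:

    - name: Derive fstab match lists (one plausible derivation; register name assumed)
      ansible.builtin.set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall(storage_test_volume._mount_id ~ ' ') }}"
        storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout | regex_findall(' ' ~ storage_test_volume.mount_point ~ ' ') }}"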

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:31:30 -0500 (0:00:00.155)       0:00:45.057 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:31:31 -0500 (0:00:00.128)       0:00:45.185 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:31:31 -0500 (0:00:00.252)       0:00:45.438 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:31:31 -0500 (0:00:00.087)       0:00:45.526 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:31:31 -0500 (0:00:00.123)       0:00:45.649 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:31:31 -0500 (0:00:00.095)       0:00:45.745 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:31:31 -0500 (0:00:00.085)       0:00:45.831 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:31:31 -0500 (0:00:00.092)       0:00:45.923 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057476.4491785,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1739057476.4491785,
        "dev": 6,
        "device_type": 64773,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 8138,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1739057476.4491785,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
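
The stat confirms that /dev/stratis/foo/test1 exists and resolves to a block device. A reconstruction of the check and the assertion that follows (the register name and the follow option are assumptions):

    - name: See whether the device node is present (reconstructed shape)
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"
        follow: true
      register: storage_test_dev   # register name assumed

    - name: Verify the presence/absence of the device node (illustrative)
      ansible.builtin.assert:
        that:
          - storage_test_dev.stat.exists
          - storage_test_dev.stat.isblk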

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:31:32 -0500 (0:00:00.478)       0:00:46.402 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:31:32 -0500 (0:00:00.048)       0:00:46.451 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:31:32 -0500 (0:00:00.053)       0:00:46.505 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:31:32 -0500 (0:00:00.069)       0:00:46.574 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:31:32 -0500 (0:00:00.067)       0:00:46.642 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:31:32 -0500 (0:00:00.061)       0:00:46.704 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:31:32 -0500 (0:00:00.095)       0:00:46.799 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:31:32 -0500 (0:00:00.067)       0:00:46.867 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:31:34 -0500 (0:00:01.466)       0:00:48.333 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:31:34 -0500 (0:00:00.089)       0:00:48.423 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:31:34 -0500 (0:00:00.106)       0:00:48.529 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:31:34 -0500 (0:00:00.124)       0:00:48.653 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:31:34 -0500 (0:00:00.071)       0:00:48.725 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:31:34 -0500 (0:00:00.077)       0:00:48.802 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:31:34 -0500 (0:00:00.068)       0:00:48.870 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:31:34 -0500 (0:00:00.061)       0:00:48.932 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:31:34 -0500 (0:00:00.094)       0:00:49.027 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:31:34 -0500 (0:00:00.127)       0:00:49.154 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.122)       0:00:49.277 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.081)       0:00:49.359 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.061)       0:00:49.420 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.058)       0:00:49.478 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.047)       0:00:49.526 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.042)       0:00:49.569 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.055)       0:00:49.625 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.044)       0:00:49.670 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.051)       0:00:49.722 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.071)       0:00:49.793 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.051)       0:00:49.844 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.044)       0:00:49.889 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.048)       0:00:49.938 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.043)       0:00:49.981 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:31:35 -0500 (0:00:00.037)       0:00:50.018 ***** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
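
As a sanity check on the parsed values: the requested "4g" is interpreted as 4 GiB, i.e. 4 x 1024^3 = 4,294,967,296 bytes, which matches the "bytes" field reported above and the "4GiB"/"4 GiB" renderings.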

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:31:36 -0500 (0:00:00.651)       0:00:50.670 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:31:36 -0500 (0:00:00.122)       0:00:50.793 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:31:36 -0500 (0:00:00.110)       0:00:50.903 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:31:36 -0500 (0:00:00.105)       0:00:51.009 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:31:36 -0500 (0:00:00.113)       0:00:51.122 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.190)       0:00:51.312 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.086)       0:00:51.398 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.100)       0:00:51.499 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.115)       0:00:51.614 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.091)       0:00:51.705 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.095)       0:00:51.801 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.081)       0:00:51.882 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.095)       0:00:51.978 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.097)       0:00:52.075 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:31:37 -0500 (0:00:00.057)       0:00:52.133 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.055)       0:00:52.189 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.068)       0:00:52.257 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.046)       0:00:52.304 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.037)       0:00:52.342 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.038)       0:00:52.380 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.037)       0:00:52.418 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.047)       0:00:52.465 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.059)       0:00:52.525 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.073)       0:00:52.598 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.097)       0:00:52.696 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.110)       0:00:52.807 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.113)       0:00:52.920 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.085)       0:00:53.006 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:31:38 -0500 (0:00:00.092)       0:00:53.099 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:31:39 -0500 (0:00:00.130)       0:00:53.229 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:31:39 -0500 (0:00:00.121)       0:00:53.351 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:31:39 -0500 (0:00:00.100)       0:00:53.451 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:31:39 -0500 (0:00:00.086)       0:00:53.538 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:31:39 -0500 (0:00:00.100)       0:00:53.639 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:31:39 -0500 (0:00:00.101)       0:00:53.740 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:31:39 -0500 (0:00:00.078)       0:00:53.819 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Repeat the previous invocation to verify idempotence] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:76
Saturday 08 February 2025  18:31:39 -0500 (0:00:00.090)       0:00:53.910 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:31:39 -0500 (0:00:00.201)       0:00:54.112 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:31:40 -0500 (0:00:00.174)       0:00:54.287 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:31:40 -0500 (0:00:00.173)       0:00:54.460 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:31:40 -0500 (0:00:00.184)       0:00:54.645 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:31:40 -0500 (0:00:00.094)       0:00:54.739 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:31:40 -0500 (0:00:00.078)       0:00:54.817 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:31:40 -0500 (0:00:00.080)       0:00:54.898 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:31:40 -0500 (0:00:00.078)       0:00:54.976 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:31:40 -0500 (0:00:00.150)       0:00:55.127 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:31:41 -0500 (0:00:00.132)       0:00:55.260 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
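
The pool specification echoed above corresponds to a role invocation along the following lines. This is a minimal sketch reconstructed from the logged storage_pools value, not the literal contents of tests_stratis.yml:

    # Sketch only: field values taken from the "Show storage_pools" output above
    - name: Run the role
      include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1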

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:31:41 -0500 (0:00:00.075)       0:00:55.335 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:31:41 -0500 (0:00:00.069)       0:00:55.405 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:31:41 -0500 (0:00:00.088)       0:00:55.493 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:31:41 -0500 (0:00:00.077)       0:00:55.570 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:31:41 -0500 (0:00:00.074)       0:00:55.645 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:31:41 -0500 (0:00:00.072)       0:00:55.717 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:31:41 -0500 (0:00:00.115)       0:00:55.832 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:31:41 -0500 (0:00:00.063)       0:00:55.896 ***** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/stratis/foo/test1",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
            "state": "mounted"
        }
    ],
    "packages": [
        "e2fsprogs",
        "xfsprogs",
        "stratis-cli",
        "stratisd"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
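
Note the empty "actions" list and "changed": false on this repeat run: that is what the idempotence verification relies on. A test could assert on the registered module result roughly as follows (a hypothetical sketch; blivet_output is the registered variable name shown later in this log):

    - name: Assert the repeat run made no changes
      assert:
        that:
          # No planned actions and nothing reported changed => idempotent
          - blivet_output is not changed
          - blivet_output.actions | length == 0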

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:31:46 -0500 (0:00:04.553)       0:01:00.449 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:31:46 -0500 (0:00:00.091)       0:01:00.541 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057479.9921935,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "bd20ea33dcd030a91563e33fd4dfaf83929363f6",
        "ctime": 1739057479.9911933,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057479.9911933,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:31:46 -0500 (0:00:00.459)       0:01:01.000 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:31:46 -0500 (0:00:00.047)       0:01:01.048 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:31:46 -0500 (0:00:00.033)       0:01:01.082 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/stratis/foo/test1",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                "state": "mounted"
            }
        ],
        "packages": [
            "e2fsprogs",
            "xfsprogs",
            "stratis-cli",
            "stratisd"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:31:46 -0500 (0:00:00.049)       0:01:01.131 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:31:47 -0500 (0:00:00.064)       0:01:01.195 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:31:47 -0500 (0:00:00.075)       0:01:01.271 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:31:47 -0500 (0:00:00.068)       0:01:01.340 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:31:48 -0500 (0:00:00.903)       0:01:02.244 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node3] => (item={'src': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863"
}
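
The mount record above maps directly onto an /etc/fstab entry. Reconstructed from the logged src, path, fstype, opts, dump, and passno fields (not copied from the managed host), the corresponding line would be:

    UUID=2b39409e-cf08-47bd-ab00-2dbbae942863 /opt/test1 xfs defaults 0 0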

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:31:48 -0500 (0:00:00.499)       0:01:02.743 ***** 
skipping: [managed-node3] => (item={'src': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:31:48 -0500 (0:00:00.186)       0:01:02.930 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:31:49 -0500 (0:00:00.952)       0:01:03.882 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
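
This is standard ansible.builtin.stat output for an empty file (size 0; the checksum is the well-known SHA-1 of the empty string). A minimal task producing it would look like this sketch (the register name is made up for illustration):

    - name: Retrieve facts for the /etc/crypttab file
      ansible.builtin.stat:
        path: /etc/crypttab
      register: storage_test_crypttab_stat  # hypothetical register name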

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:31:50 -0500 (0:00:00.489)       0:01:04.371 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:31:50 -0500 (0:00:00.033)       0:01:04.405 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:89
Saturday 08 February 2025  18:31:51 -0500 (0:00:01.043)       0:01:05.449 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:31:51 -0500 (0:00:00.098)       0:01:05.547 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}
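
The pool dump above is the role's fully normalized form; an input specification of roughly this shape would produce it, with all remaining keys filled in as role defaults. This is a sketch reconstructed from the printed values, not the verbatim contents of tests_stratis.yml:

    storage_pools:
      - name: foo
        type: stratis
        disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
        volumes:
          - name: test1
            size: "4g"
            fs_type: xfs
            mount_point: /opt/test1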

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:31:51 -0500 (0:00:00.073)       0:01:05.620 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:31:51 -0500 (0:00:00.054)       0:01:05.675 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "50680d0c-03bd-4f76-925a-a05d5f71b4ae"
        },
        "/dev/sdb": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "c7c8e77b-f93a-413a-86c4-57dc1a5b456a"
        },
        "/dev/sdc": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "bcfba5ec-d005-4f6f-a370-10f104355b62"
        },
        "/dev/sdd": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "08ba2c2c-855e-451c-8337-a4327e64d1c6"
        },
        "/dev/sde": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "72263662-d2ea-4fd6-92b6-cad6f0d0acff"
        },
        "/dev/sdf": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "1bbf6eaa-71ab-4e8b-a161-926902dc29f2"
        },
        "/dev/sdg": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "1457869e-7e69-45df-9adf-362ef4b5c376"
        },
        "/dev/sdh": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "9fba51fb-d433-4b5d-83e5-5b31a2f888af"
        },
        "/dev/sdi": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "c7af15fa-8905-4e5f-b629-64fc8b26a590"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "2b39409e-cf08-47bd-ab00-2dbbae942863"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
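
The `info` map above mirrors lsblk's view of the system, one entry per block device. A command task along these lines would gather equivalent data (a sketch; the test may use its own info-gathering module, and the register name below is made up):

    - name: Collect info about the volumes (lsblk equivalent)
      ansible.builtin.command:
        cmd: lsblk -p -o NAME,TYPE,SIZE,FSTYPE,LABEL,UUID,MOUNTPOINT
      register: storage_test_lsblk  # hypothetical register name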

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:31:51 -0500 (0:00:00.452)       0:01:06.128 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003869",
    "end": "2025-02-08 18:31:52.315629",
    "rc": 0,
    "start": "2025-02-08 18:31:52.311760"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=2b39409e-cf08-47bd-ab00-2dbbae942863 /opt/test1 xfs defaults 0 0
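
Note the `# system_role:storage` header and the final UUID line, which the role appended for /opt/test1. A follow-up assertion of roughly this shape would confirm the entry exists (a sketch; `storage_test_fstab` is an assumed register name for the `cat /etc/fstab` result above, and the UUID is copied from that output):

    - name: Verify the fstab entry for the volume
      ansible.builtin.assert:
        that:
          # exactly one fstab line should reference the volume's mount id
          - storage_test_fstab.stdout_lines | select('search', 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863') | list | length == 1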

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:31:52 -0500 (0:00:00.457)       0:01:06.585 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.004103",
    "end": "2025-02-08 18:31:52.779190",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:31:52.775087"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:31:52 -0500 (0:00:00.469)       0:01:07.054 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.152)       0:01:07.207 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.070)       0:01:07.278 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.043)       0:01:07.321 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.043)       0:01:07.365 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.093)       0:01:07.459 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.037)       0:01:07.496 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.034)       0:01:07.531 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.036)       0:01:07.567 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.083)       0:01:07.651 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.063)       0:01:07.714 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.065)       0:01:07.779 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.064)       0:01:07.844 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.067)       0:01:07.912 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:31:53 -0500 (0:00:00.038)       0:01:07.951 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.465)       0:01:08.416 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.058)       0:01:08.474 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.078)       0:01:08.552 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.041)       0:01:08.594 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.059)       0:01:08.653 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.077)       0:01:08.731 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.058)       0:01:08.789 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.048)       0:01:08.838 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.058)       0:01:08.896 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.048)       0:01:08.945 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.037)       0:01:08.982 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.035)       0:01:09.018 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.036)       0:01:09.054 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:31:54 -0500 (0:00:00.042)       0:01:09.097 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.085)       0:01:09.183 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.088)       0:01:09.272 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.129)       0:01:09.401 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.071)       0:01:09.472 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.149)       0:01:09.621 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.120)       0:01:09.741 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.077)       0:01:09.819 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.061)       0:01:09.881 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.075)       0:01:09.957 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.096)       0:01:10.053 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:31:55 -0500 (0:00:00.057)       0:01:10.111 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:31:56 -0500 (0:00:00.135)       0:01:10.246 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.375698",
    "end": "2025-02-08 18:31:56.818543",
    "rc": 0,
    "start": "2025-02-08 18:31:56.442845"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "50680d0c-03bd-4f76-925a-a05d5f71b4ae"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdb",
                        "size": "20971520 sectors",
                        "uuid": "c7c8e77b-f93a-413a-86c4-57dc1a5b456a"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdc",
                        "size": "20971520 sectors",
                        "uuid": "bcfba5ec-d005-4f6f-a370-10f104355b62"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdd",
                        "size": "2147483648 sectors",
                        "uuid": "08ba2c2c-855e-451c-8337-a4327e64d1c6"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sde",
                        "size": "2147483648 sectors",
                        "uuid": "72263662-d2ea-4fd6-92b6-cad6f0d0acff"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdf",
                        "size": "20971520 sectors",
                        "uuid": "1bbf6eaa-71ab-4e8b-a161-926902dc29f2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdg",
                        "size": "2147483648 sectors",
                        "uuid": "1457869e-7e69-45df-9adf-362ef4b5c376"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdh",
                        "size": "20971520 sectors",
                        "uuid": "9fba51fb-d433-4b5d-83e5-5b31a2f888af"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdi",
                        "size": "20971520 sectors",
                        "uuid": "c7af15fa-8905-4e5f-b629-64fc8b26a590"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "2b39409e-cf08-47bd-ab00-2dbbae942863"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "16b10e73-7c28-4b2a-84b9-c2d615bf1929"
        }
    ],
    "stopped_pools": []
}
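
`stratis report` prints this JSON on stdout, so the next task can parse it directly. A sketch of what verify-pool-stratis.yml:11 plausibly does with the registered result (the log itself confirms the register variable `storage_test_stratis_report` and the fact name `_stratis_pool_info`):

    - name: Get information about Stratis
      ansible.builtin.set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"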

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:31:56 -0500 (0:00:00.864)       0:01:11.111 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "50680d0c-03bd-4f76-925a-a05d5f71b4ae"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdb",
                                "size": "20971520 sectors",
                                "uuid": "c7c8e77b-f93a-413a-86c4-57dc1a5b456a"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdc",
                                "size": "20971520 sectors",
                                "uuid": "bcfba5ec-d005-4f6f-a370-10f104355b62"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdd",
                                "size": "2147483648 sectors",
                                "uuid": "08ba2c2c-855e-451c-8337-a4327e64d1c6"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sde",
                                "size": "2147483648 sectors",
                                "uuid": "72263662-d2ea-4fd6-92b6-cad6f0d0acff"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdf",
                                "size": "20971520 sectors",
                                "uuid": "1bbf6eaa-71ab-4e8b-a161-926902dc29f2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdg",
                                "size": "2147483648 sectors",
                                "uuid": "1457869e-7e69-45df-9adf-362ef4b5c376"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdh",
                                "size": "20971520 sectors",
                                "uuid": "9fba51fb-d433-4b5d-83e5-5b31a2f888af"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdi",
                                "size": "20971520 sectors",
                                "uuid": "c7af15fa-8905-4e5f-b629-64fc8b26a590"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "2b39409e-cf08-47bd-ab00-2dbbae942863"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "16b10e73-7c28-4b2a-84b9-c2d615bf1929"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:31:57 -0500 (0:00:00.168)       0:01:11.279 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
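
An assertion of roughly this shape would yield the "All assertions passed" result above (a sketch, not the verbatim task; `storage_test_pool` is the pool under test, as seen in the skip conditions elsewhere in this log):

    - name: Verify that the pool was created
      ansible.builtin.assert:
        that:
          # exactly one Stratis pool with the expected name should exist
          - _stratis_pool_info.pools | selectattr('name', 'equalto', storage_test_pool.name) | list | length == 1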

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:31:57 -0500 (0:00:00.157)       0:01:11.437 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:31:57 -0500 (0:00:00.101)       0:01:11.538 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:31:57 -0500 (0:00:00.081)       0:01:11.620 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:31:57 -0500 (0:00:00.090)       0:01:11.710 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:31:57 -0500 (0:00:00.152)       0:01:11.863 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:31:57 -0500 (0:00:00.114)       0:01:11.977 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}
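
The eight subset names above drive the task below, which pulls in one test-verify-volume-<subset>.yml file per entry. The include pattern, inferred from the templated task name and the files actually included:

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"        # mount, fstab, fs, device, ...
      loop_control:
        loop_var: storage_test_volume_subset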

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:31:57 -0500 (0:00:00.097)       0:01:12.074 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.295)       0:01:12.370 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.090)       0:01:12.460 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.110)       0:01:12.571 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.064)       0:01:12.636 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
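
The mount check passes because ansible_facts already lists /dev/stratis/foo/test1 mounted at /opt/test1. An equivalent assertion (a sketch using the storage_test_device_path fact set above):

    - name: Verify the current mount state by device
      ansible.builtin.assert:
        that:
          # exactly one mount entry must pair the expected path and device
          - ansible_facts.mounts | selectattr('mount', 'equalto', '/opt/test1') | selectattr('device', 'equalto', storage_test_device_path) | list | length == 1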

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.092)       0:01:12.729 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.082)       0:01:12.811 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.079)       0:01:12.890 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.074)       0:01:12.964 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.068)       0:01:13.033 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:31:58 -0500 (0:00:00.068)       0:01:13.101 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.075)       0:01:13.177 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.105)       0:01:13.282 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
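
The three match lists come from scanning /etc/fstab for the volume's mount id, mount point, and options. Roughly how the id list can be derived (a sketch; storage_test_fstab holding the file's registered contents is an assumption based on the variable cleaned up later in verify-role-results.yml):

    - name: Collect fstab lines that reference the volume's mount id
      ansible.builtin.set_fact:
        # the mount-point and mount-options lists follow the same select('search', ...) pattern
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines | select('search', '^' ~ storage_test_volume._mount_id ~ ' ') | list }}"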

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.186)       0:01:13.468 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.139)       0:01:13.607 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.109)       0:01:13.717 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.087)       0:01:13.805 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.094)       0:01:13.899 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.083)       0:01:13.982 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.073)       0:01:14.056 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:31:59 -0500 (0:00:00.079)       0:01:14.136 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057476.4491785,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1739057476.4491785,
        "dev": 6,
        "device_type": 64773,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 8138,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1739057476.4491785,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
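
The probe above is a plain stat of the symlink Stratis publishes under /dev/stratis/<pool>/<fs>; islnk=false combined with isblk=true indicates the link was followed to the underlying dm block device. Reproduced as a task (register name is illustrative):

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: /dev/stratis/foo/test1
        follow: true                  # resolve the symlink to the dm block device
      register: __stratis_fs_stat     # hypothetical name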

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:32:00 -0500 (0:00:00.495)       0:01:14.631 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:32:00 -0500 (0:00:00.076)       0:01:14.708 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:32:00 -0500 (0:00:00.084)       0:01:14.792 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:32:00 -0500 (0:00:00.077)       0:01:14.870 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:32:00 -0500 (0:00:00.071)       0:01:14.941 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:32:00 -0500 (0:00:00.067)       0:01:15.009 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:32:00 -0500 (0:00:00.074)       0:01:15.083 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:32:00 -0500 (0:00:00.043)       0:01:15.126 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
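
"Nothing to do" means the package manager found cryptsetup already installed. The task is effectively an idempotent package install:

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present    # no-op when already installed, hence rc=0 and empty results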

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:32:02 -0500 (0:00:01.472)       0:01:16.599 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:32:02 -0500 (0:00:00.085)       0:01:16.684 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:32:02 -0500 (0:00:00.122)       0:01:16.807 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:32:02 -0500 (0:00:00.160)       0:01:16.967 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:32:02 -0500 (0:00:00.101)       0:01:17.069 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:32:02 -0500 (0:00:00.091)       0:01:17.160 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:32:03 -0500 (0:00:00.113)       0:01:17.273 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:32:03 -0500 (0:00:00.093)       0:01:17.366 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:32:03 -0500 (0:00:00.096)       0:01:17.463 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:32:03 -0500 (0:00:00.123)       0:01:17.587 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
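
With encryption disabled, the expected crypttab entry count is zero, and the passing assertion amounts to comparing the two facts set just above. A sketch:

    - name: Check for /etc/crypttab entry
      ansible.builtin.assert:
        that:
          # an unencrypted volume must contribute no crypttab entries
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int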

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:32:03 -0500 (0:00:00.290)       0:01:17.877 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:32:03 -0500 (0:00:00.110)       0:01:17.987 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:32:03 -0500 (0:00:00.148)       0:01:18.136 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:32:04 -0500 (0:00:00.122)       0:01:18.259 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:32:04 -0500 (0:00:00.143)       0:01:18.403 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:32:04 -0500 (0:00:00.095)       0:01:18.498 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:32:04 -0500 (0:00:00.101)       0:01:18.600 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:32:04 -0500 (0:00:00.103)       0:01:18.703 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:32:04 -0500 (0:00:00.110)       0:01:18.814 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:32:04 -0500 (0:00:00.099)       0:01:18.914 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:32:04 -0500 (0:00:00.123)       0:01:19.038 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:32:04 -0500 (0:00:00.112)       0:01:19.150 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:32:05 -0500 (0:00:00.094)       0:01:19.246 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:32:05 -0500 (0:00:00.084)       0:01:19.330 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:32:05 -0500 (0:00:00.079)       0:01:19.410 ***** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
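
The task normalized the 4 GiB device size into several spellings; the byte count agrees with the stock human_to_bytes filter (shown here as a cross-check, not the helper the test actually uses):

    - name: Cross-check the parsed byte count
      ansible.builtin.assert:
        that:
          - ("4 GiB" | human_to_bytes) == 4294967296   # 4 * 2**30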

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:32:05 -0500 (0:00:00.656)       0:01:20.066 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.113)       0:01:20.180 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.122)       0:01:20.302 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.091)       0:01:20.394 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.167)       0:01:20.561 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.107)       0:01:20.669 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.081)       0:01:20.751 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.059)       0:01:20.810 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.062)       0:01:20.873 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.043)       0:01:20.916 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.038)       0:01:20.954 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.043)       0:01:20.998 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.079)       0:01:21.078 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:32:06 -0500 (0:00:00.079)       0:01:21.157 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.066)       0:01:21.224 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.065)       0:01:21.289 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.065)       0:01:21.354 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.055)       0:01:21.409 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.043)       0:01:21.453 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.044)       0:01:21.497 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.041)       0:01:21.538 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.045)       0:01:21.584 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.038)       0:01:21.622 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.037)       0:01:21.660 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.042)       0:01:21.703 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.047)       0:01:21.750 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.052)       0:01:21.803 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.038)       0:01:21.841 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.037)       0:01:21.878 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.089)       0:01:21.968 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.039)       0:01:22.007 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.038)       0:01:22.046 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.038)       0:01:22.084 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:32:07 -0500 (0:00:00.042)       0:01:22.126 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.061)       0:01:22.188 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.034)       0:01:22.222 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Add second filesystem to the pool] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:92
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.038)       0:01:22.261 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.089)       0:01:22.351 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.063)       0:01:22.414 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.067)       0:01:22.482 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.083)       0:01:22.566 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.042)       0:01:22.609 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.042)       0:01:22.652 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.045)       0:01:22.697 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.051)       0:01:22.749 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.137)       0:01:22.887 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.074)       0:01:22.962 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                },
                {
                    "mount_point": "/opt/test2",
                    "name": "test2",
                    "size": "4g"
                }
            ]
        }
    ]
}
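
The storage_pools value above corresponds to the "Add second filesystem to the pool" step at tests_stratis.yml:92. Reconstructed as a role invocation, it would look roughly like this (a sketch based on the values echoed above):

    - name: Add second filesystem to the pool
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            volumes:
              - {name: test1, size: 4g, mount_point: /opt/test1}
              - {name: test2, size: 4g, mount_point: /opt/test2}   # the new filesystem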

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.078)       0:01:23.040 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:32:08 -0500 (0:00:00.072)       0:01:23.113 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:32:09 -0500 (0:00:00.073)       0:01:23.187 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:32:09 -0500 (0:00:00.102)       0:01:23.290 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:32:09 -0500 (0:00:00.077)       0:01:23.368 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:32:09 -0500 (0:00:00.075)       0:01:23.443 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:32:09 -0500 (0:00:00.115)       0:01:23.559 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:32:09 -0500 (0:00:00.065)       0:01:23.624 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create device",
            "device": "/dev/stratis/foo/test2",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/stratis/foo/test2",
            "fs_type": "stratis xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/stratis/foo/test1",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0",
        "/dev/stratis/foo/test2"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
            "state": "mounted"
        },
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test2",
            "src": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
            "state": "mounted"
        }
    ],
    "packages": [
        "stratisd",
        "e2fsprogs",
        "xfsprogs",
        "stratis-cli"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/stratis/foo/test2",
                    "_kernel_device": "/dev/dm-6",
                    "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                    "_raw_device": "/dev/stratis/foo/test2",
                    "_raw_kernel_device": "/dev/dm-6",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
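
The pool and volume fields echoed in this result map directly back to the role's storage_pools input. A minimal sketch of an invocation that would request this state, reconstructed from the result above (only non-default keys are shown; the task wrapper and task name are assumed, not taken from the test file):

    - name: Add a second volume to the stratis pool  # hypothetical task name
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1
              - name: test2
                size: 4g
                mount_point: /opt/test2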

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:32:19 -0500 (0:00:10.219)       0:01:33.844 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:32:19 -0500 (0:00:00.086)       0:01:33.930 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057479.9921935,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "bd20ea33dcd030a91563e33fd4dfaf83929363f6",
        "ctime": 1739057479.9911933,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057479.9911933,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:32:20 -0500 (0:00:00.516)       0:01:34.447 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:32:20 -0500 (0:00:00.539)       0:01:34.986 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:32:20 -0500 (0:00:00.095)       0:01:35.082 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create device",
                "device": "/dev/stratis/foo/test2",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/stratis/foo/test2",
                "fs_type": "stratis xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/stratis/foo/test1",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0",
            "/dev/stratis/foo/test2"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                "state": "mounted"
            },
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test2",
                "src": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                "state": "mounted"
            }
        ],
        "packages": [
            "stratisd",
            "e2fsprogs",
            "xfsprogs",
            "stratis-cli"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/stratis/foo/test2",
                        "_kernel_device": "/dev/dm-6",
                        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                        "_raw_device": "/dev/stratis/foo/test2",
                        "_raw_kernel_device": "/dev/dm-6",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:32:20 -0500 (0:00:00.082)       0:01:35.165 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-5",
                        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-5",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/stratis/foo/test2",
                        "_kernel_device": "/dev/dm-6",
                        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                        "_raw_device": "/dev/stratis/foo/test2",
                        "_raw_kernel_device": "/dev/dm-6",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:32:21 -0500 (0:00:00.099)       0:01:35.264 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:32:21 -0500 (0:00:00.057)       0:01:35.322 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:32:21 -0500 (0:00:00.061)       0:01:35.384 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
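
The null name and empty status are what a pure daemon-reload call returns. A minimal sketch of such a task, assuming the ansible.builtin.systemd_service module is what the role uses here:

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd_service:
        daemon_reload: true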

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:32:22 -0500 (0:00:00.886)       0:01:36.271 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node3] => (item={'src': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863"
}
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=664da094-bb3a-4735-be34-505e74d1f599', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
        "state": "mounted"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=664da094-bb3a-4735-be34-505e74d1f599"
}
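
The module redirect lines show each loop item being handed to ansible.posix.mount. A minimal sketch of the per-item task, assuming the role loops over the mounts list from the blivet result (the loop variable name mount_info matches ansible_loop_var in the output; the exact parameter mapping is assumed):

    - name: Set up new/current mounts
      ansible.posix.mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        opts: "{{ mount_info['opts'] }}"
        state: "{{ mount_info['state'] }}"
      loop: "{{ blivet_output.mounts }}"  # source list assumed from the blivet_output shown earlier
      loop_control:
        loop_var: mount_info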

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:32:23 -0500 (0:00:00.954)       0:01:37.225 ***** 
skipping: [managed-node3] => (item={'src': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item={'src': 'UUID=664da094-bb3a-4735-be34-505e74d1f599', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test2",
        "src": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:32:23 -0500 (0:00:00.112)       0:01:37.337 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:32:24 -0500 (0:00:00.985)       0:01:38.322 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:32:24 -0500 (0:00:00.492)       0:01:38.815 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:32:24 -0500 (0:00:00.069)       0:01:38.884 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:108
Saturday 08 February 2025  18:32:26 -0500 (0:00:01.568)       0:01:40.452 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:32:26 -0500 (0:00:00.202)       0:01:40.655 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-5",
                    "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-5",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/stratis/foo/test2",
                    "_kernel_device": "/dev/dm-6",
                    "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                    "_raw_device": "/dev/stratis/foo/test2",
                    "_raw_kernel_device": "/dev/dm-6",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:32:26 -0500 (0:00:00.111)       0:01:40.767 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:32:26 -0500 (0:00:00.090)       0:01:40.858 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-16b10e737c284b2a84b9c2d615bf1929-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "50680d0c-03bd-4f76-925a-a05d5f71b4ae"
        },
        "/dev/sdb": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "c7c8e77b-f93a-413a-86c4-57dc1a5b456a"
        },
        "/dev/sdc": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "bcfba5ec-d005-4f6f-a370-10f104355b62"
        },
        "/dev/sdd": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "08ba2c2c-855e-451c-8337-a4327e64d1c6"
        },
        "/dev/sde": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "72263662-d2ea-4fd6-92b6-cad6f0d0acff"
        },
        "/dev/sdf": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "1bbf6eaa-71ab-4e8b-a161-926902dc29f2"
        },
        "/dev/sdg": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "1457869e-7e69-45df-9adf-362ef4b5c376"
        },
        "/dev/sdh": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "9fba51fb-d433-4b5d-83e5-5b31a2f888af"
        },
        "/dev/sdi": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "c7af15fa-8905-4e5f-b629-64fc8b26a590"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "2b39409e-cf08-47bd-ab00-2dbbae942863"
        },
        "/dev/stratis/foo/test2": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test2",
            "name": "/dev/stratis/foo/test2",
            "size": "4G",
            "type": "stratis",
            "uuid": "664da094-bb3a-4735-be34-505e74d1f599"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
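
The info dict above can be spot-checked by hand with lsblk; the column set below is chosen to mirror the reported fields (the test gathers this through its own helper, so this task is illustrative only):

    - name: Inspect block devices  # hypothetical spot-check, not part of the test
      ansible.builtin.command:
        cmd: lsblk -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      changed_when: false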

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:32:27 -0500 (0:00:00.552)       0:01:41.411 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003251",
    "end": "2025-02-08 18:32:27.667560",
    "rc": 0,
    "start": "2025-02-08 18:32:27.664309"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=2b39409e-cf08-47bd-ab00-2dbbae942863 /opt/test1 xfs defaults 0 0
UUID=664da094-bb3a-4735-be34-505e74d1f599 /opt/test2 xfs defaults 0 0
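
Both test volumes now appear at the end of /etc/fstab, below the # system_role:storage marker the role added earlier. A minimal sketch of asserting their presence from a registered result of the cat command (the register name storage_test_fstab is hypothetical):

    - name: Verify both volumes are present in fstab
      ansible.builtin.assert:
        that:
          - storage_test_fstab.stdout is search('/opt/test1 xfs')
          - storage_test_fstab.stdout is search('/opt/test2 xfs')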

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:32:27 -0500 (0:00:00.599)       0:01:42.010 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003210",
    "end": "2025-02-08 18:32:28.282642",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:32:28.279432"
}
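
The failed_when_result field shows this read is deliberately allowed to fail without failing the play, which suits hosts where /etc/crypttab may be absent. A minimal sketch of that pattern (task shape assumed from the result fields):

    - name: Read the /etc/crypttab file
      ansible.builtin.command:
        cmd: cat /etc/crypttab
      register: storage_test_crypttab  # register name is hypothetical
      failed_when: false
      changed_when: false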

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:32:28 -0500 (0:00:00.570)       0:01:42.581 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'}, {'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:32:28 -0500 (0:00:00.212)       0:01:42.794 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:32:28 -0500 (0:00:00.093)       0:01:42.887 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:32:28 -0500 (0:00:00.078)       0:01:42.966 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:32:28 -0500 (0:00:00.063)       0:01:43.029 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)
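
The two included: lines show the pool checks fanning out over the _storage_pool_tests list set a few tasks earlier, one task file per subset. A minimal sketch of that dispatch (the file name pattern is taken from the includes above; the loop shape and loop variable name are assumed):

    - name: Verify pool subset
      ansible.builtin.include_tasks: "test-verify-pool-{{ storage_test_pool_subset }}.yml"
      loop: "{{ _storage_pool_tests }}"
      loop_control:
        loop_var: storage_test_pool_subset  # hypothetical loop_var name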

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:32:28 -0500 (0:00:00.137)       0:01:43.166 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:32:29 -0500 (0:00:00.065)       0:01:43.232 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:32:29 -0500 (0:00:00.060)       0:01:43.293 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:32:29 -0500 (0:00:00.071)       0:01:43.364 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:32:29 -0500 (0:00:00.070)       0:01:43.434 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:32:29 -0500 (0:00:00.065)       0:01:43.499 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:32:29 -0500 (0:00:00.065)       0:01:43.565 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:32:29 -0500 (0:00:00.068)       0:01:43.634 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:32:29 -0500 (0:00:00.070)       0:01:43.704 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:32:29 -0500 (0:00:00.058)       0:01:43.763 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.46.217 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/3f058d2ae1'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.46.217 closed.
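
The "auto-mux: Trying existing master" lines above show OpenSSH reusing the multiplexed control socket Ansible keeps under /root/.ansible/cp/, which is why no fresh authentication happens for this task. Connection multiplexing is on by default in the ssh connection plugin; when it needs tuning, one common knob is the ansible_ssh_common_args inventory variable. A sketch (the option values shown mirror the plugin's default behavior and are illustrative only):

    # group_vars/all.yml -- sketch; these mirror the ssh plugin's defaults
    ansible_ssh_common_args: "-o ControlMaster=auto -o ControlPersist=60s"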


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.512)       0:01:44.275 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.055)       0:01:44.331 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.101)       0:01:44.432 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.038)       0:01:44.471 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.041)       0:01:44.512 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.042)       0:01:44.555 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.062)       0:01:44.618 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.063)       0:01:44.682 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.070)       0:01:44.753 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.072)       0:01:44.825 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.097)       0:01:44.923 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.075)       0:01:44.999 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.057)       0:01:45.056 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:32:30 -0500 (0:00:00.056)       0:01:45.112 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.089)       0:01:45.201 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_kernel_device": "/dev/dm-6",
        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
        "_raw_device": "/dev/stratis/foo/test2",
        "_raw_kernel_device": "/dev/dm-6",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.053)       0:01:45.255 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.126)       0:01:45.382 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_kernel_device": "/dev/dm-6",
        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
        "_raw_device": "/dev/stratis/foo/test2",
        "_raw_kernel_device": "/dev/dm-6",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.099)       0:01:45.481 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.102)       0:01:45.584 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.063)       0:01:45.648 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.038)       0:01:45.686 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.034)       0:01:45.721 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.044)       0:01:45.765 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.079)       0:01:45.845 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-5",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-5",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_kernel_device": "/dev/dm-6",
        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
        "_raw_device": "/dev/stratis/foo/test2",
        "_raw_kernel_device": "/dev/dm-6",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.054)       0:01:45.900 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:32:31 -0500 (0:00:00.142)       0:01:46.042 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.375264",
    "end": "2025-02-08 18:32:32.626279",
    "rc": 0,
    "start": "2025-02-08 18:32:32.251015"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "50680d0c-03bd-4f76-925a-a05d5f71b4ae"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdb",
                        "size": "20971520 sectors",
                        "uuid": "c7c8e77b-f93a-413a-86c4-57dc1a5b456a"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdc",
                        "size": "20971520 sectors",
                        "uuid": "bcfba5ec-d005-4f6f-a370-10f104355b62"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdd",
                        "size": "2147483648 sectors",
                        "uuid": "08ba2c2c-855e-451c-8337-a4327e64d1c6"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sde",
                        "size": "2147483648 sectors",
                        "uuid": "72263662-d2ea-4fd6-92b6-cad6f0d0acff"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdf",
                        "size": "20971520 sectors",
                        "uuid": "1bbf6eaa-71ab-4e8b-a161-926902dc29f2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdg",
                        "size": "2147483648 sectors",
                        "uuid": "1457869e-7e69-45df-9adf-362ef4b5c376"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdh",
                        "size": "20971520 sectors",
                        "uuid": "9fba51fb-d433-4b5d-83e5-5b31a2f888af"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": false,
                        "path": "/dev/sdi",
                        "size": "20971520 sectors",
                        "uuid": "c7af15fa-8905-4e5f-b629-64fc8b26a590"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test2",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "664da094-bb3a-4735-be34-505e74d1f599"
                },
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "2b39409e-cf08-47bd-ab00-2dbbae942863"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "16b10e73-7c28-4b2a-84b9-c2d615bf1929"
        }
    ],
    "stopped_pools": []
}
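
The JSON above is the raw stdout of 'stratis report'; the next task stores the parsed structure in the '_stratis_pool_info' fact. A minimal sketch of that command/parse pair, assuming the task names from the paths above and the 'storage_test_stratis_report' register name that is reset later in this log (the actual verify-pool-stratis.yml may differ in detail):

    - name: Run 'stratis report'
      command: stratis report
      register: storage_test_stratis_report
      changed_when: false

    - name: Get information about Stratis
      set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"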

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:32:32 -0500 (0:00:00.862)       0:01:46.905 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "50680d0c-03bd-4f76-925a-a05d5f71b4ae"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdb",
                                "size": "20971520 sectors",
                                "uuid": "c7c8e77b-f93a-413a-86c4-57dc1a5b456a"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdc",
                                "size": "20971520 sectors",
                                "uuid": "bcfba5ec-d005-4f6f-a370-10f104355b62"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdd",
                                "size": "2147483648 sectors",
                                "uuid": "08ba2c2c-855e-451c-8337-a4327e64d1c6"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sde",
                                "size": "2147483648 sectors",
                                "uuid": "72263662-d2ea-4fd6-92b6-cad6f0d0acff"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdf",
                                "size": "20971520 sectors",
                                "uuid": "1bbf6eaa-71ab-4e8b-a161-926902dc29f2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdg",
                                "size": "2147483648 sectors",
                                "uuid": "1457869e-7e69-45df-9adf-362ef4b5c376"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdh",
                                "size": "20971520 sectors",
                                "uuid": "9fba51fb-d433-4b5d-83e5-5b31a2f888af"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": false,
                                "path": "/dev/sdi",
                                "size": "20971520 sectors",
                                "uuid": "c7af15fa-8905-4e5f-b629-64fc8b26a590"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test2",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "664da094-bb3a-4735-be34-505e74d1f599"
                        },
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "2b39409e-cf08-47bd-ab00-2dbbae942863"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "16b10e73-7c28-4b2a-84b9-c2d615bf1929"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:32:32 -0500 (0:00:00.094)       0:01:46.999 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
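
The assertion that just passed compares the requested pool against '_stratis_pool_info'. A hedged sketch of such a check (the exact expression in the test file may differ):

    - name: Verify that the pool was created
      assert:
        that:
          - _stratis_pool_info.pools | selectattr('name', 'equalto', storage_test_pool.name) | list | length == 1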

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:32:32 -0500 (0:00:00.138)       0:01:47.138 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.054)       0:01:47.192 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.060)       0:01:47.252 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.047)       0:01:47.300 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.045)       0:01:47.345 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', '_kernel_device': '/dev/dm-5', '_raw_kernel_device': '/dev/dm-5'})
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.076)       0:01:47.422 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.064)       0:01:47.486 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
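
The eight includes above are produced by a single loop over the '_storage_volume_tests' list set two tasks earlier. The driving task in test-verify-volume.yml presumably looks close to this sketch (the loop variable name is inferred from the templated task name):

    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset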

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.294)       0:01:47.780 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.063)       0:01:47.844 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.071)       0:01:47.916 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.038)       0:01:47.955 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.044)       0:01:48.000 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.042)       0:01:48.042 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:32:33 -0500 (0:00:00.066)       0:01:48.109 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.063)       0:01:48.172 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.053)       0:01:48.225 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.042)       0:01:48.268 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.049)       0:01:48.318 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.048)       0:01:48.366 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
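
The three *_matches lists above look like regex_findall results over the text of /etc/fstab, with the expected counts asserted against them in the following tasks. A sketch under that assumption ('__fstab_content' is a hypothetical variable holding the fstab text; the real test may build these lists differently):

    - name: Set some variables for fstab checking
      set_fact:
        storage_test_fstab_id_matches: "{{ __fstab_content | regex_findall(storage_test_volume._mount_id ~ ' ') }}"
        storage_test_fstab_mount_point_matches: "{{ __fstab_content | regex_findall(' ' ~ storage_test_volume.mount_point ~ ' ') }}"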

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.078)       0:01:48.445 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.062)       0:01:48.507 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.108)       0:01:48.617 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.119)       0:01:48.736 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.098)       0:01:48.835 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.073)       0:01:48.908 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:32:34 -0500 (0:00:00.172)       0:01:49.080 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:32:35 -0500 (0:00:00.111)       0:01:49.192 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057476.4491785,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1739057476.4491785,
        "dev": 6,
        "device_type": 64773,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 8138,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1739057476.4491785,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
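
The stat payload above is what drives the next two presence checks: the node exists, stat followed the /dev/stratis symlink to a block device (isblk is true), while the mimetype field still reports inode/symlink for the link itself. A minimal sketch of the kind of task that produces this output, with the path expression and register name as assumptions:

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"  # assumed variable; resolves to /dev/stratis/foo/test1 here
        follow: true
      register: storage_test_dev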

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:32:35 -0500 (0:00:00.531)       0:01:49.724 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:32:35 -0500 (0:00:00.045)       0:01:49.770 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:32:35 -0500 (0:00:00.043)       0:01:49.814 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:32:35 -0500 (0:00:00.045)       0:01:49.859 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:32:35 -0500 (0:00:00.042)       0:01:49.902 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:32:35 -0500 (0:00:00.037)       0:01:49.939 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:32:35 -0500 (0:00:00.056)       0:01:49.996 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:32:35 -0500 (0:00:00.061)       0:01:50.058 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
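
"Nothing to do" with an empty results list is the package manager's way of reporting that the package is already installed. The task is presumably a plain package install along these lines:

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present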

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:32:37 -0500 (0:00:01.461)       0:01:51.520 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.038)       0:01:51.558 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.042)       0:01:51.601 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.064)       0:01:51.665 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.059)       0:01:51.724 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.076)       0:01:51.801 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.085)       0:01:51.886 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.044)       0:01:51.931 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.045)       0:01:51.976 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}
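
These three facts parameterize the crypttab assertions that follow: an unencrypted Stratis volume expects zero /etc/crypttab entries and the default "-" key file. One plausible way to derive them, assuming a previously registered storage_test_crypttab holding the file's lines (none of these expressions are copied from the test file):

    - name: Set test variables
      ansible.builtin.set_fact:
        _storage_test_crypttab_entries: "{{ storage_test_crypttab.stdout_lines | select('search', storage_test_volume.name) | list }}"
        _storage_test_expected_crypttab_entries: "{{ '1' if storage_test_volume.encryption else '0' }}"
        _storage_test_expected_crypttab_key_file: "{{ storage_test_volume.encryption_key | default('-') }}"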

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.070)       0:01:52.047 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.063)       0:01:52.110 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:32:37 -0500 (0:00:00.054)       0:01:52.164 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.053)       0:01:52.218 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.057)       0:01:52.275 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.071)       0:01:52.347 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.067)       0:01:52.414 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.066)       0:01:52.480 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.067)       0:01:52.548 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.073)       0:01:52.621 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.046)       0:01:52.668 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.045)       0:01:52.713 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.046)       0:01:52.760 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.047)       0:01:52.808 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.040)       0:01:52.848 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:32:38 -0500 (0:00:00.059)       0:01:52.908 ***** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
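
The bytes/lvm/parted/size quadruple is the signature of the tests' size-conversion helper, which normalizes one size string into several formats so that later assertions can compare like with like; the register name is confirmed by the "Show actual size" output further down. A sketch of the call, assuming the helper is a bsize module shipped in the tests' library/ directory:

    - name: Parse the actual size of the volume
      bsize:
        size: "4 GiB"  # in the real test this would come from gathered block-device info, not a literal
      register: storage_test_actual_size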

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:32:39 -0500 (0:00:00.463)       0:01:53.371 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:32:39 -0500 (0:00:00.061)       0:01:53.433 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:32:39 -0500 (0:00:00.054)       0:01:53.488 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
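
The "VARIABLE IS NOT DEFINED!" marker is not a failure: when debug's var points at an undefined variable, the module prints this placeholder and the play continues. That is expected here, since storage_test_expected_size is only computed on the lvm branch, and every lvm-conditioned task above was skipped for this stratis volume. The task itself is just:

    - name: Show expected size
      ansible.builtin.debug:
        var: storage_test_expected_size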

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:32:39 -0500 (0:00:00.054)       0:01:53.542 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:32:39 -0500 (0:00:00.086)       0:01:53.628 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:32:39 -0500 (0:00:00.073)       0:01:53.701 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:32:39 -0500 (0:00:00.100)       0:01:53.802 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:32:39 -0500 (0:00:00.085)       0:01:53.887 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:32:39 -0500 (0:00:00.122)       0:01:54.010 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.225)       0:01:54.236 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.076)       0:01:54.312 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.078)       0:01:54.391 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.074)       0:01:54.466 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.072)       0:01:54.538 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.067)       0:01:54.606 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.067)       0:01:54.673 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.098)       0:01:54.772 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.117)       0:01:54.889 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.090)       0:01:54.980 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.114)       0:01:55.095 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:32:40 -0500 (0:00:00.076)       0:01:55.171 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.089)       0:01:55.260 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.072)       0:01:55.333 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.062)       0:01:55.395 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.069)       0:01:55.465 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.078)       0:01:55.544 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.118)       0:01:55.662 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.079)       0:01:55.741 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.074)       0:01:55.815 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.078)       0:01:55.894 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.065)       0:01:55.960 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.066)       0:01:56.026 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:32:41 -0500 (0:00:00.068)       0:01:56.095 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.078)       0:01:56.173 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.069)       0:01:56.242 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}
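
The next task fans these eight subsets out into one include per entry; the file names in the include lines below follow test-verify-volume-<subset>.yml. The raw {{ storage_test_volume_subset }} left in the task name is normal, since loop variables are not templated into the names of include tasks. A sketch of the loop, with the loop_var name inferred from the task name:

    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset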

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.095)       0:01:56.337 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.260)       0:01:56.597 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test2"
    },
    "changed": false
}
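
For a stratis volume the expected mount device is the /dev/stratis/<pool>/<filesystem> symlink rather than a raw block device. A plausible shape for the fact-setting task (variable names assumed; the log only shows the resulting path):

    - name: Get expected mount device based on device type
      ansible.builtin.set_fact:
        storage_test_device_path: "/dev/stratis/{{ storage_test_pool.name }}/{{ storage_test_volume.name }}"
      when: storage_test_volume.type == "stratis"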

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.128)       0:01:56.725 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test2",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.131)       0:01:56.857 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.081)       0:01:56.939 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.088)       0:01:57.028 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.067)       0:01:57.096 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:32:42 -0500 (0:00:00.068)       0:01:57.164 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:32:43 -0500 (0:00:00.107)       0:01:57.272 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:32:43 -0500 (0:00:00.066)       0:01:57.338 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:32:43 -0500 (0:00:00.067)       0:01:57.406 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:32:43 -0500 (0:00:00.066)       0:01:57.472 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:32:43 -0500 (0:00:00.079)       0:01:57.552 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=664da094-bb3a-4735-be34-505e74d1f599 "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test2 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test2 "
        ]
    },
    "changed": false
}
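
Each *_matches list above holds the /etc/fstab lines that matched a pattern, and each expected count of "1" is what the three assertions that follow compare against. A rough sketch of how such lists could be built, assuming a registered storage_test_fstab holding the file's lines (expressions are illustrative, not copied from the test):

    - name: Set some variables for fstab checking
      ansible.builtin.set_fact:
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout_lines | select('search', storage_test_volume._mount_id) | list }}"  # _mount_id assumed to be the UUID=... spec
        storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout_lines | select('search', ' ' ~ storage_test_volume.mount_point ~ ' ') | list }}"
        storage_test_fstab_expected_mount_point_matches: "{{ '1' if _storage_test_volume_present else '0' }}"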

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:32:43 -0500 (0:00:00.131)       0:01:57.684 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:32:43 -0500 (0:00:00.127)       0:01:57.811 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:32:43 -0500 (0:00:00.116)       0:01:57.928 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:32:43 -0500 (0:00:00.115)       0:01:58.043 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:32:44 -0500 (0:00:00.322)       0:01:58.366 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:32:44 -0500 (0:00:00.066)       0:01:58.432 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:32:44 -0500 (0:00:00.052)       0:01:58.485 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:32:44 -0500 (0:00:00.044)       0:01:58.530 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057539.4884446,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1739057539.4884446,
        "dev": 6,
        "device_type": 64774,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 8172,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1739057539.4884446,
        "nlink": 1,
        "path": "/dev/stratis/foo/test2",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:32:44 -0500 (0:00:00.466)       0:01:58.997 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:32:44 -0500 (0:00:00.052)       0:01:59.049 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:32:44 -0500 (0:00:00.045)       0:01:59.095 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:32:44 -0500 (0:00:00.047)       0:01:59.142 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:32:45 -0500 (0:00:00.046)       0:01:59.189 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:32:45 -0500 (0:00:00.038)       0:01:59.227 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:32:45 -0500 (0:00:00.044)       0:01:59.272 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:32:45 -0500 (0:00:00.045)       0:01:59.317 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:32:46 -0500 (0:00:01.482)       0:02:00.800 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:32:46 -0500 (0:00:00.076)       0:02:00.877 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:32:46 -0500 (0:00:00.097)       0:02:00.974 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:32:46 -0500 (0:00:00.140)       0:02:01.115 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.117)       0:02:01.233 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.068)       0:02:01.301 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.083)       0:02:01.385 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.070)       0:02:01.455 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.105)       0:02:01.561 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.109)       0:02:01.670 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.160)       0:02:01.830 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.070)       0:02:01.901 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.088)       0:02:01.989 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.053)       0:02:02.043 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.042)       0:02:02.086 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.038)       0:02:02.124 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:32:47 -0500 (0:00:00.043)       0:02:02.168 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:32:48 -0500 (0:00:00.045)       0:02:02.214 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:32:48 -0500 (0:00:00.064)       0:02:02.278 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:32:48 -0500 (0:00:00.072)       0:02:02.350 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:32:48 -0500 (0:00:00.122)       0:02:02.473 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:32:48 -0500 (0:00:00.083)       0:02:02.557 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:32:48 -0500 (0:00:00.111)       0:02:02.668 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:32:48 -0500 (0:00:00.074)       0:02:02.743 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}
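
Every task in test-verify-volume-md.yml carries the same guard, so for this stratis volume the whole block skips with the false_condition recorded above. A sketch of the guard shape (the assert body here is hypothetical, not the test's actual check):

    - name: Check RAID chunk size (guard shape only)
      ansible.builtin.assert:
        that:
          - storage_test_volume.raid_chunk_size is not none
      when: storage_test_volume.type == 'raid'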

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:32:48 -0500 (0:00:00.078)       0:02:02.822 ***** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
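
The parsed size is 4294967296 bytes, i.e. 4 × 1024³, normalized above into the lvm ("4g") and parted ("4GiB") spellings. A minimal illustration of rendering such a byte count with the stock human_readable filter (the test itself uses its own parsing task, so this is only an analogy):

    - name: Render 4294967296 bytes for display (illustrative only)
      ansible.builtin.debug:
        msg: "{{ (4 * 1024 * 1024 * 1024) | human_readable }}"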

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:32:49 -0500 (0:00:00.532)       0:02:03.354 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:32:49 -0500 (0:00:00.141)       0:02:03.496 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:32:49 -0500 (0:00:00.111)       0:02:03.607 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
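
storage_test_expected_size is only computed for LVM volumes, so for this stratis volume debug prints its undefined-variable placeholder instead of failing; debug tolerates undefined vars where most modules would error. A hypothetical variant that makes the fallback explicit:

    - name: Show expected size with an explicit fallback (hypothetical variant)
      ansible.builtin.debug:
        msg: "{{ storage_test_expected_size | default('not computed for non-LVM volumes') }}"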

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:32:49 -0500 (0:00:00.126)       0:02:03.734 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:32:49 -0500 (0:00:00.152)       0:02:03.886 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:32:49 -0500 (0:00:00.130)       0:02:04.016 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:32:49 -0500 (0:00:00.128)       0:02:04.145 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.243)       0:02:04.388 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.108)       0:02:04.497 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.081)       0:02:04.579 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.069)       0:02:04.648 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.071)       0:02:04.719 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.067)       0:02:04.787 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.098)       0:02:04.885 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.073)       0:02:04.959 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.109)       0:02:05.068 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:32:50 -0500 (0:00:00.085)       0:02:05.154 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:32:51 -0500 (0:00:00.095)       0:02:05.249 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:32:51 -0500 (0:00:00.081)       0:02:05.331 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:32:51 -0500 (0:00:00.117)       0:02:05.448 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:32:51 -0500 (0:00:00.092)       0:02:05.540 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:32:51 -0500 (0:00:00.122)       0:02:05.663 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:32:51 -0500 (0:00:00.121)       0:02:05.785 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:32:51 -0500 (0:00:00.114)       0:02:05.899 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:32:51 -0500 (0:00:00.161)       0:02:06.060 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.154)       0:02:06.215 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.130)       0:02:06.345 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.088)       0:02:06.434 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.089)       0:02:06.523 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.105)       0:02:06.628 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.065)       0:02:06.693 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.088)       0:02:06.782 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.080)       0:02:06.862 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.109)       0:02:06.972 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}
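
Resetting the scratch fact to null between volumes keeps a later iteration from reading a stale value; note that a null fact is still defined, just empty. The pattern, as used by the task above:

    - name: Reset per-volume scratch facts between iterations
      ansible.builtin.set_fact:
        _storage_test_volume_present: null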

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:32:52 -0500 (0:00:00.120)       0:02:07.093 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.100)       0:02:07.194 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:111
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.071)       0:02:07.265 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.152)       0:02:07.418 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.073)       0:02:07.491 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.072)       0:02:07.564 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
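
Only Fedora.yml exists on this host; Fedora_41.yml is probed twice, most likely because the major-version and full-version candidate filenames render identically on Fedora 41. Also note the libblockdev entry in blivet_package_list is stored as a raw template string and only renders when the fact is consumed, since Ansible templates variables lazily. A sketch of the include pattern (candidate list abbreviated, paths assumed):

    - name: Load platform vars when the candidate file exists (sketch of the pattern)
      ansible.builtin.include_vars:
        file: "{{ __vars_file }}"
      loop:
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file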

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.088)       0:02:07.653 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.044)       0:02:07.698 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.043)       0:02:07.741 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.046)       0:02:07.788 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.077)       0:02:07.865 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.146)       0:02:08.011 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:32:53 -0500 (0:00:00.081)       0:02:08.093 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g",
                    "state": "absent"
                },
                {
                    "mount_point": "/opt/test2",
                    "name": "test2",
                    "size": "4g",
                    "state": "absent"
                }
            ]
        }
    ]
}
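
This is the same pool definition used earlier in the test, resubmitted with state: absent on the pool and on each volume, which is how the role is asked to tear the stratis pool down. A sketch of such an invocation, built from the values shown above:

    - name: Tear down the stratis pool from this run (sketch from the values above)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            state: absent
            volumes:
              - { name: test1, size: 4g, mount_point: /opt/test1, state: absent }
              - { name: test2, size: 4g, mount_point: /opt/test2, state: absent }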

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:32:54 -0500 (0:00:00.149)       0:02:08.242 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:32:54 -0500 (0:00:00.051)       0:02:08.293 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:32:54 -0500 (0:00:00.048)       0:02:08.342 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:32:54 -0500 (0:00:00.044)       0:02:08.387 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:32:54 -0500 (0:00:00.049)       0:02:08.437 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:32:54 -0500 (0:00:00.044)       0:02:08.481 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:32:54 -0500 (0:00:00.075)       0:02:08.556 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:32:54 -0500 (0:00:00.036)       0:02:08.592 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/stratis/foo/test2",
            "fs_type": "stratis xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo/test2",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/stratis/foo/test1",
            "fs_type": "stratis xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo/test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdc",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdi",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sde",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdf",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdh",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdd",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdg",
            "fs_type": "stratis"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test2",
            "src": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
            "state": "absent"
        },
        {
            "fstype": "xfs",
            "path": "/opt/test1",
            "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
            "state": "absent"
        }
    ],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/stratis/foo/test2",
                    "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                    "_raw_device": "/dev/stratis/foo/test2",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
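
Note the ordering of the actions: each filesystem's format is destroyed before its device, both filesystems before the pool device, and the stratis signatures on the member disks last, so teardown proceeds from the leaves inward. The mounts list likewise marks both fstab entries absent for the mount-cleanup step further down. A hypothetical post-check against the registered result (blivet_output, echoed below):

    - name: Confirm the pool device itself was destroyed (hypothetical post-check)
      ansible.builtin.assert:
        that:
          - >-
            blivet_output.actions
            | selectattr('action', 'equalto', 'destroy device')
            | selectattr('device', 'equalto', '/dev/stratis/foo')
            | list | length == 1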

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:33:08 -0500 (0:00:13.698)       0:02:22.291 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:33:08 -0500 (0:00:00.067)       0:02:22.358 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057542.903459,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "0f6d0a908e0a22fd44295df7bd176ec70368c7cf",
        "ctime": 1739057542.901459,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057542.901459,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1506,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
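
Statting /etc/fstab first lets the "Add fingerprint ... if present" step that follows skip cleanly on systems without one. A minimal sketch of the stat-then-guard pattern (register name assumed):

    - name: Probe /etc/fstab so later edits can be guarded (minimal sketch)
      ansible.builtin.stat:
        path: /etc/fstab
      register: __storage_fstab_stat  # register name assumed for this sketch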

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:33:08 -0500 (0:00:00.513)       0:02:22.872 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:33:09 -0500 (0:00:00.510)       0:02:23.382 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:33:09 -0500 (0:00:00.071)       0:02:23.453 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/stratis/foo/test2",
                "fs_type": "stratis xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo/test2",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/stratis/foo/test1",
                "fs_type": "stratis xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo/test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdc",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdi",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sde",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdf",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdh",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdd",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdg",
                "fs_type": "stratis"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test2",
                "src": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                "state": "absent"
            },
            {
                "fstype": "xfs",
                "path": "/opt/test1",
                "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                "state": "absent"
            }
        ],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/stratis/foo/test2",
                        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                        "_raw_device": "/dev/stratis/foo/test2",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:33:09 -0500 (0:00:00.085)       0:02:23.539 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    },
                    {
                        "_device": "/dev/stratis/foo/test2",
                        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                        "_raw_device": "/dev/stratis/foo/test2",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test2",
                        "mount_user": null,
                        "name": "test2",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:33:09 -0500 (0:00:00.079)       0:02:23.619 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:33:09 -0500 (0:00:00.075)       0:02:23.695 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=664da094-bb3a-4735-be34-505e74d1f599', 'path': '/opt/test2', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test2",
        "src": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
        "state": "absent"
    },
    "name": "/opt/test2",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=664da094-bb3a-4735-be34-505e74d1f599"
}
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test1",
        "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863"
}
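
Each obsolete mount is removed with ansible.posix.mount and state: absent, which unmounts the path and deletes the matching fstab entry in one step. Replaying the first loop item as a standalone task:

    - name: Remove one obsolete mount (values from the first item above)
      ansible.posix.mount:
        src: UUID=664da094-bb3a-4735-be34-505e74d1f599
        path: /opt/test2
        fstype: xfs
        state: absent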

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:33:10 -0500 (0:00:01.131)       0:02:24.827 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
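
The null name and empty status are consistent with a daemon_reload-only call to the systemd module, prompting systemd to regenerate its mount units from the just-edited /etc/fstab. A minimal sketch:

    - name: Reload systemd so it re-reads /etc/fstab (minimal sketch)
      ansible.builtin.systemd_service:
        daemon_reload: true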

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:33:11 -0500 (0:00:01.051)       0:02:25.878 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:33:11 -0500 (0:00:00.110)       0:02:25.988 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:33:11 -0500 (0:00:00.098)       0:02:26.087 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:33:12 -0500 (0:00:01.043)       0:02:27.131 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
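
This is plain stat module output: size 0 plus the SHA-1 of the empty string
(da39a3ee...) confirm that /etc/crypttab exists but is currently empty. A
sketch of the call; the register name is hypothetical, and sha1 is the stat
module's default checksum algorithm, which matches the 40-hex-digit checksum
above:

    - name: Retrieve facts for the /etc/crypttab file
      ansible.builtin.stat:
        path: /etc/crypttab
      register: storage_test_crypttab_stat   # hypothetical name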

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:33:13 -0500 (0:00:00.557)       0:02:27.689 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:33:13 -0500 (0:00:00.045)       0:02:27.734 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:130
Saturday 08 February 2025  18:33:14 -0500 (0:00:01.067)       0:02:28.801 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:33:14 -0500 (0:00:00.210)       0:02:29.012 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                },
                {
                    "_device": "/dev/stratis/foo/test2",
                    "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
                    "_raw_device": "/dev/stratis/foo/test2",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test2",
                    "mount_user": null,
                    "name": "test2",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}
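
This dump is a plain debug of the fact recorded by the role run; note that the
pool and both volumes carry state 'absent' plus the computed underscore keys
(_device, _raw_device, _mount_id). The task reduces to the sketch below; the
when guard is assumed by analogy with the volume task that follows:

    - name: Print out pool information
      ansible.builtin.debug:
        var: _storage_pools_list
      when: _storage_pools_list | length > 0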

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:33:14 -0500 (0:00:00.137)       0:02:29.149 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:33:15 -0500 (0:00:00.185)       0:02:29.335 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
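
Every entry in info carries the same seven keys (name, type, size, fstype,
label, uuid, mountpoint), which line up with lsblk columns; the test disks are
all bare again, and only /dev/xvda2 and the zram swap remain in use. The exact
helper the test runs is not shown in this excerpt, but an equivalent
collection step could look like:

    - name: Collect info about the volumes.
      ansible.builtin.command:
        cmd: lsblk -p -P -o NAME,TYPE,SIZE,FSTYPE,LABEL,UUID,MOUNTPOINT
      register: storage_test_blkinfo   # hypothetical name
      changed_when: false              # read-only probe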

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:33:15 -0500 (0:00:00.535)       0:02:29.871 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003882",
    "end": "2025-02-08 18:33:16.073105",
    "rc": 0,
    "start": "2025-02-08 18:33:16.069223"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
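
Only the root filesystem and the long-standing NFS mounts remain; both UUID=
lines for /opt/test1 and /opt/test2 are gone, confirming the earlier 'Remove
obsolete mounts' task took effect. The read itself is a plain command, per the
cmd field above:

    - name: Read the /etc/fstab file for volume existence
      ansible.builtin.command:
        cmd: cat /etc/fstab
      register: storage_test_fstab   # hypothetical name
      changed_when: false            # reading a file never changes state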

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:33:16 -0500 (0:00:00.464)       0:02:30.336 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:01.004439",
    "end": "2025-02-08 18:33:17.532051",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:33:16.527612"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:33:17 -0500 (0:00:01.479)       0:02:31.815 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863'}, {'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:33:17 -0500 (0:00:00.232)       0:02:32.047 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:33:17 -0500 (0:00:00.058)       0:02:32.106 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:33:17 -0500 (0:00:00.052)       0:02:32.158 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.045)       0:02:32.204 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.077)       0:02:32.282 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.040)       0:02:32.323 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.034)       0:02:32.357 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.078)       0:02:32.436 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.078)       0:02:32.514 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.086)       0:02:32.601 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.156)       0:02:32.757 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.064)       0:02:32.822 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.066)       0:02:32.888 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:33:18 -0500 (0:00:00.061)       0:02:32.950 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.46.217 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/3f058d2ae1'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.46.217 closed.
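
The STDERR block above is only OpenSSH connection debugging from a verbose
connection setup, not a failure: rc is 0 and failed_when_result is false, so
the installed blivet passed the grow-to-fill capability probe. The probe's
exact expression is not shown in this log; one plausible shape, with the class
and attribute names assumed:

    - name: Check that blivet supports PV grow to fill
      ansible.builtin.command:
        cmd: python3 -c 'import blivet.formats.lvmpv as p; exit(0 if hasattr(p.LVMPhysicalVolume, "grow_to_fill") else 1)'
      register: storage_test_grow_supported   # hypothetical name
      failed_when: false    # the test inspects rc itself, hence failed_when_result above
      changed_when: false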


TASK [Verify that PVs fill their whole devices when they should] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:33:19 -0500 (0:00:00.602)       0:02:33.552 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:33:19 -0500 (0:00:00.087)       0:02:33.640 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:33:19 -0500 (0:00:00.160)       0:02:33.801 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:33:19 -0500 (0:00:00.081)       0:02:33.883 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:33:19 -0500 (0:00:00.073)       0:02:33.956 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:33:19 -0500 (0:00:00.072)       0:02:34.029 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:33:19 -0500 (0:00:00.068)       0:02:34.097 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:33:19 -0500 (0:00:00.051)       0:02:34.149 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.044)       0:02:34.193 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.044)       0:02:34.238 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.042)       0:02:34.280 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.041)       0:02:34.322 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.037)       0:02:34.359 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.041)       0:02:34.400 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.081)       0:02:34.481 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
        "_raw_device": "/dev/stratis/foo/test2",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.066)       0:02:34.548 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.079)       0:02:34.628 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
        "_raw_device": "/dev/stratis/foo/test2",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.084)       0:02:34.712 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.119)       0:02:34.831 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.064)       0:02:34.895 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.046)       0:02:34.942 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.038)       0:02:34.981 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.039)       0:02:35.021 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.081)       0:02:35.102 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=2b39409e-cf08-47bd-ab00-2dbbae942863",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test2",
        "_mount_id": "UUID=664da094-bb3a-4735-be34-505e74d1f599",
        "_raw_device": "/dev/stratis/foo/test2",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test2",
        "mount_user": null,
        "name": "test2",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:33:20 -0500 (0:00:00.050)       0:02:35.153 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:33:21 -0500 (0:00:00.083)       0:02:35.236 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.380420",
    "end": "2025-02-08 18:33:21.796145",
    "rc": 0,
    "start": "2025-02-08 18:33:21.415725"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [],
    "stopped_pools": []
}
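
An empty report: no pools, no stopped pools, no partially constructed pools,
so the earlier removal actually reached the Stratis daemon. Per the cmd field,
this is a plain command invocation; the register name below is taken from the
'Reset variable used by test' task later in this log:

    - name: Run 'stratis report'
      ansible.builtin.command:
        cmd: stratis report
      register: storage_test_stratis_report
      changed_when: false   # reporting is read-only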

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:33:21 -0500 (0:00:00.837)       0:02:36.073 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [],
            "stopped_pools": []
        }
    },
    "changed": false
}
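
The fact is just the parsed form of the report JSON above. A minimal sketch of
the conversion; the from_json parse is an assumption, but it is the idiomatic
way to turn command stdout into the structure shown:

    - name: Get information about Stratis
      ansible.builtin.set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"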

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.111)       0:02:36.184 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}
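
Skipped because this pool is being removed, so there is nothing to assert
present. When the condition does hold, a check of this shape would run; the
when expression is taken from the false_condition above, while the assert body
is hypothetical:

    - name: Verify that the pool was created
      ansible.builtin.assert:
        that:
          - storage_test_pool.name in _stratis_pool_info.name_to_pool_uuid_map
        fail_msg: "Stratis pool {{ storage_test_pool.name }} was not created"
      when: storage_test_pool.state == 'present'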

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.059)       0:02:36.244 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.051)       0:02:36.295 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.059)       0:02:36.354 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.048)       0:02:36.403 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.039)       0:02:36.442 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=2b39409e-cf08-47bd-ab00-2dbbae942863'})
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=664da094-bb3a-4735-be34-505e74d1f599'})
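
Each volume dict from the pool is handed to test-verify-volume.yml in turn. Judging by the later conditionals on storage_test_volume, the dispatching loop in test-verify-pool-volumes.yml plausibly looks like this (the exact loop source name is an assumption):

    # Run the per-volume verification task file once per volume in the pool
    # under test, exposing each dict as storage_test_volume.
    - name: Verify the volumes
      include_tasks: test-verify-volume.yml
      loop: "{{ storage_test_pool.volumes }}"
      loop_control:
        loop_var: storage_test_volume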

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.144)       0:02:36.587 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.080)       0:02:36.667 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
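
The eight includes are driven by the _storage_volume_tests list set just above; the templated task name "Run test verify for {{ storage_test_volume_subset }}" suggests a loop of roughly this shape (the loop_control detail is inferred, not shown in the log):

    # Include one test-verify-volume-<subset>.yml file per entry in
    # _storage_volume_tests (mount, fstab, fs, device, ...).
    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset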

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.185)       0:02:36.853 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.052)       0:02:36.906 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.059)       0:02:36.965 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.037)       0:02:37.003 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.040)       0:02:37.043 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.044)       0:02:37.088 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:33:22 -0500 (0:00:00.058)       0:02:37.146 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.077)       0:02:37.223 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.068)       0:02:37.291 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.067)       0:02:37.359 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.099)       0:02:37.459 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.089)       0:02:37.549 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.143)       0:02:37.692 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.071)       0:02:37.764 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
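
The log does not echo the assertion body, but given the match-count variables set in "Set some variables for fstab checking" (all "0" expected for an absent volume), it plausibly compares expected and actual match counts; a sketch only, not the actual test code:

    # With the volume absent, zero fstab lines should reference its mount point.
    - name: Verify the fstab mount point
      assert:
        that:
          - storage_test_fstab_mount_point_matches | length == storage_test_fstab_expected_mount_point_matches | int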

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.064)       0:02:37.828 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.051)       0:02:37.880 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.061)       0:02:37.941 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.043)       0:02:37.985 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.047)       0:02:38.033 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:33:23 -0500 (0:00:00.052)       0:02:38.085 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}
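
The stat above targets the volume's device node (here /dev/stratis/foo/test1, which correctly no longer exists). A sketch, with the register name storage_test_dev being a hypothetical stand-in:

    # Check whether the volume's device node still exists on disk.
    - name: See whether the device node is present
      stat:
        path: "{{ storage_test_volume._device }}"
      register: storage_test_dev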

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:33:24 -0500 (0:00:00.468)       0:02:38.554 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:33:24 -0500 (0:00:00.056)       0:02:38.610 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:33:24 -0500 (0:00:00.056)       0:02:38.667 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:33:24 -0500 (0:00:00.062)       0:02:38.729 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:33:24 -0500 (0:00:00.049)       0:02:38.779 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:33:24 -0500 (0:00:00.037)       0:02:38.817 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:33:24 -0500 (0:00:00.041)       0:02:38.858 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:33:24 -0500 (0:00:00.049)       0:02:38.907 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
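
The "lsrpackages: cryptsetup" marker and the empty results list indicate a package-module task that found cryptsetup already installed; minimally:

    # Make sure the cryptsetup tooling needed for the LUKS checks is available.
    - name: Ensure cryptsetup is present
      package:
        name: cryptsetup
        state: present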

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:33:26 -0500 (0:00:01.548)       0:02:40.455 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:33:26 -0500 (0:00:00.097)       0:02:40.553 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:33:26 -0500 (0:00:00.097)       0:02:40.651 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:33:26 -0500 (0:00:00.065)       0:02:40.716 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:33:26 -0500 (0:00:00.124)       0:02:40.840 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:33:26 -0500 (0:00:00.077)       0:02:40.918 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:33:26 -0500 (0:00:00.062)       0:02:40.981 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:33:26 -0500 (0:00:00.061)       0:02:41.042 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:33:26 -0500 (0:00:00.067)       0:02:41.110 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.098)       0:02:41.209 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
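
Again the assertion body is not echoed in the log; since the volume is unencrypted, zero /etc/crypttab entries are expected, so the check is plausibly along these lines (a sketch only):

    # An unencrypted volume must not leave an entry behind in /etc/crypttab.
    - name: Check for /etc/crypttab entry
      assert:
        that:
          - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int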

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.094)       0:02:41.304 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.083)       0:02:41.387 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.163)       0:02:41.551 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.084)       0:02:41.636 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.119)       0:02:41.755 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.127)       0:02:41.882 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.131)       0:02:42.014 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.070)       0:02:42.085 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:33:27 -0500 (0:00:00.080)       0:02:42.165 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.062)       0:02:42.228 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.071)       0:02:42.300 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.068)       0:02:42.369 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.067)       0:02:42.436 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.068)       0:02:42.505 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.085)       0:02:42.591 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.091)       0:02:42.683 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.098)       0:02:42.782 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.077)       0:02:42.859 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
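
"VARIABLE IS NOT DEFINED!" is the standard rendering of debug's var option on an undefined variable; it is harmless here because every size-calculation task was skipped for the absent volume. The task is plausibly just:

    # Print the expected size for debugging; undefined at this point because
    # the size-calculation tasks above were all skipped.
    - name: Show expected size
      debug:
        var: storage_test_expected_size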

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.076)       0:02:42.936 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.109)       0:02:43.045 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:33:28 -0500 (0:00:00.113)       0:02:43.158 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.093)       0:02:43.252 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.098)       0:02:43.351 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.083)       0:02:43.434 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.062)       0:02:43.496 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.066)       0:02:43.562 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.060)       0:02:43.623 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.062)       0:02:43.686 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.061)       0:02:43.748 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.068)       0:02:43.816 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.050)       0:02:43.867 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.040)       0:02:43.907 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.041)       0:02:43.949 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.047)       0:02:43.997 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.060)       0:02:44.058 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:33:29 -0500 (0:00:00.083)       0:02:44.142 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.062)       0:02:44.204 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.067)       0:02:44.272 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.090)       0:02:44.362 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.105)       0:02:44.467 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.074)       0:02:44.542 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.079)       0:02:44.621 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.047)       0:02:44.669 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.043)       0:02:44.713 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.043)       0:02:44.756 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.037)       0:02:44.793 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.041)       0:02:44.834 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.089)       0:02:44.923 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.041)       0:02:44.965 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.048)       0:02:45.014 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:33:30 -0500 (0:00:00.088)       0:02:45.103 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.272)       0:02:45.375 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test2"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.074)       0:02:45.450 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test2",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.097)       0:02:45.548 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.043)       0:02:45.591 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.041)       0:02:45.632 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.042)       0:02:45.675 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.045)       0:02:45.721 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.037)       0:02:45.758 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.042)       0:02:45.801 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.059)       0:02:45.861 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.067)       0:02:45.928 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:33:31 -0500 (0:00:00.103)       0:02:46.031 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:33:32 -0500 (0:00:00.201)       0:02:46.233 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:33:32 -0500 (0:00:00.148)       0:02:46.381 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:33:32 -0500 (0:00:00.160)       0:02:46.542 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:33:32 -0500 (0:00:00.121)       0:02:46.664 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:33:32 -0500 (0:00:00.128)       0:02:46.792 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:33:32 -0500 (0:00:00.104)       0:02:46.896 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:33:32 -0500 (0:00:00.085)       0:02:46.982 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:33:32 -0500 (0:00:00.097)       0:02:47.080 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}
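
The stat above targets the volume's expected device node; since the volume is expected to be absent at this point, "exists" comes back false. A minimal sketch of the task shape (the path expression and register name are assumptions):

    - name: See whether the device node is present (sketch)
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"
      register: storage_test_dev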

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:33:33 -0500 (0:00:00.589)       0:02:47.669 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:33:33 -0500 (0:00:00.085)       0:02:47.754 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:33:33 -0500 (0:00:00.127)       0:02:47.881 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:33:33 -0500 (0:00:00.081)       0:02:47.963 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:33:33 -0500 (0:00:00.132)       0:02:48.096 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:33:34 -0500 (0:00:00.099)       0:02:48.196 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:33:34 -0500 (0:00:00.127)       0:02:48.323 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:33:34 -0500 (0:00:00.076)       0:02:48.400 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
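
"Nothing to do" plus the lsrpackages marker is the usual result when the package manager finds the package already installed. A minimal sketch of the task behind this output:

    - name: Ensure cryptsetup is present (sketch)
      ansible.builtin.package:
        name: cryptsetup
        state: present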

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:33:35 -0500 (0:00:01.566)       0:02:49.966 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:33:35 -0500 (0:00:00.094)       0:02:50.060 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:33:35 -0500 (0:00:00.104)       0:02:50.165 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.101)       0:02:50.267 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.101)       0:02:50.369 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.077)       0:02:50.446 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.082)       0:02:50.529 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.074)       0:02:50.603 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.077)       0:02:50.681 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}
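
The crypttab checks that follow assert against these values; with no LUKS volume here, zero entries are expected. A minimal sketch of the comparison (the assertion wording is an assumption; the variables are the ones set above):

    - name: Check for /etc/crypttab entry (sketch)
      ansible.builtin.assert:
        that:
          - _storage_test_crypttab_entries | length ==
            _storage_test_expected_crypttab_entries | int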

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.093)       0:02:50.775 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.184)       0:02:50.960 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.083)       0:02:51.044 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:33:36 -0500 (0:00:00.067)       0:02:51.111 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.063)       0:02:51.174 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.051)       0:02:51.226 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.042)       0:02:51.268 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.037)       0:02:51.306 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.037)       0:02:51.343 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.042)       0:02:51.386 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.038)       0:02:51.424 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.038)       0:02:51.463 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.049)       0:02:51.512 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.069)       0:02:51.582 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.065)       0:02:51.647 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.093)       0:02:51.741 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.083)       0:02:51.825 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.086)       0:02:51.912 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.055)       0:02:51.968 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.049)       0:02:52.017 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.059)       0:02:52.076 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:33:37 -0500 (0:00:00.056)       0:02:52.132 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.052)       0:02:52.185 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.051)       0:02:52.237 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.051)       0:02:52.288 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.042)       0:02:52.331 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.038)       0:02:52.369 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.038)       0:02:52.408 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.037)       0:02:52.445 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.043)       0:02:52.488 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.038)       0:02:52.526 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.037)       0:02:52.564 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.037)       0:02:52.602 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.042)       0:02:52.644 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.038)       0:02:52.683 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.037)       0:02:52.721 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.053)       0:02:52.775 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.047)       0:02:52.822 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.043)       0:02:52.866 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.060)       0:02:52.926 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.063)       0:02:52.990 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.053)       0:02:53.043 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.065)       0:02:53.109 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:33:38 -0500 (0:00:00.043)       0:02:53.152 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.043)       0:02:53.195 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.142)       0:02:53.338 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.065)       0:02:53.403 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.108)       0:02:53.512 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.074)       0:02:53.587 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.097)       0:02:53.685 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.075)       0:02:53.760 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.067)       0:02:53.828 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Create encrypted Stratis pool] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:136
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.076)       0:02:53.904 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:33:39 -0500 (0:00:00.202)       0:02:54.107 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.198)       0:02:54.306 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.155)       0:02:54.462 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
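
The item sequence shown (RedHat.yml, Fedora.yml, then Fedora_41.yml twice) matches the role's usual vars-file lookup: os family, distribution, then distribution plus major and full version, loading each file only if it exists. A sketch inferred from the items and the "__vars_file is file" condition printed above (the exact loop list in set_vars.yml may differ):

    - name: Set platform/version specific variables (sketch)
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file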

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.150)       0:02:54.612 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.053)       0:02:54.665 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.053)       0:02:54.719 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.051)       0:02:54.771 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.046)       0:02:54.818 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.114)       0:02:54.932 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.068)       0:02:55.001 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
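
Reassembled as playbook YAML, the invocation that produced this run looks roughly like the following (values copied verbatim from the output above; placement under vars: is an assumption, and the test actually pulls the role in mid-play, as the "included:" lines show):

    - hosts: managed-node3
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            encryption: true
            encryption_password: yabbadabbadoo
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1
      roles:
        - fedora.linux_system_roles.storage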

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.061)       0:02:55.062 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:33:40 -0500 (0:00:00.053)       0:02:55.115 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:33:41 -0500 (0:00:00.057)       0:02:55.173 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:33:41 -0500 (0:00:00.081)       0:02:55.255 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:33:41 -0500 (0:00:00.057)       0:02:55.312 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:33:41 -0500 (0:00:00.052)       0:02:55.364 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:33:41 -0500 (0:00:00.111)       0:02:55.476 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:33:41 -0500 (0:00:00.065)       0:02:55.542 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sdi",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdh",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdg",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdf",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sde",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdd",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdc",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        },
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo/test1",
            "fs_type": null
        },
        {
            "action": "create format",
            "device": "/dev/stratis/foo/test1",
            "fs_type": "stratis xfs"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0",
        "/dev/stratis/foo/test1"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
            "state": "mounted"
        }
    ],
    "packages": [
        "stratis-cli",
        "stratisd",
        "xfsprogs",
        "e2fsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-14",
                    "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-14",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
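
The single entry under "mounts" maps directly onto ansible.posix.mount (the module the role redirects ansible.builtin.mount to, as logged earlier). A sketch with the values copied from the output:

    - name: Mount the new Stratis filesystem on /opt/test1 (sketch)
      ansible.posix.mount:
        src: UUID=49ecc62e-29ff-42fd-935d-363e3336463a
        path: /opt/test1
        fstype: xfs
        opts: defaults
        state: mounted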

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:35:12 -0500 (0:01:30.834)       0:04:26.376 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:35:12 -0500 (0:00:00.144)       0:04:26.520 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057590.48166,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "58c9cf35b6a5bb13136caa97ec2cf1f888ff31f6",
        "ctime": 1739057590.4806597,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057590.4806597,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:35:12 -0500 (0:00:00.604)       0:04:27.125 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:35:13 -0500 (0:00:00.594)       0:04:27.720 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:35:13 -0500 (0:00:00.087)       0:04:27.807 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sdi",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdh",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdg",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdf",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sde",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdd",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdc",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            },
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo/test1",
                "fs_type": null
            },
            {
                "action": "create format",
                "device": "/dev/stratis/foo/test1",
                "fs_type": "stratis xfs"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0",
            "/dev/stratis/foo/test1"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                "state": "mounted"
            }
        ],
        "packages": [
            "stratis-cli",
            "stratisd",
            "xfsprogs",
            "e2fsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-14",
                        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-14",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:35:13 -0500 (0:00:00.097)       0:04:27.905 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-14",
                        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-14",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:35:13 -0500 (0:00:00.104)       0:04:28.009 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:35:13 -0500 (0:00:00.087)       0:04:28.096 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:35:14 -0500 (0:00:00.091)       0:04:28.188 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:35:15 -0500 (0:00:01.001)       0:04:29.190 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a"
}
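
For reference, this mount step is performed by ansible.posix.mount (note the module redirection lines above). A minimal standalone task producing an equivalent fstab entry might look like the sketch below; the UUID is taken from the log, and the task name is illustrative:

    - name: Mount the Stratis filesystem on /opt/test1 (sketch)
      ansible.posix.mount:
        src: UUID=49ecc62e-29ff-42fd-935d-363e3336463a
        path: /opt/test1
        fstype: xfs
        opts: defaults
        state: mounted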

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:35:15 -0500 (0:00:00.702)       0:04:29.892 ***** 
skipping: [managed-node3] => (item={'src': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:35:15 -0500 (0:00:00.217)       0:04:30.109 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
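
Both "Tell systemd to refresh its view of /etc/fstab" tasks amount to a daemon-reload. Assuming the role drives this through the systemd module (the log shows only the module result, not the task source), a minimal sketch:

    - name: Reload systemd so it re-reads units generated from /etc/fstab (sketch)
      ansible.builtin.systemd_service:
        daemon_reload: true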

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:35:17 -0500 (0:00:01.103)       0:04:31.213 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
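
The crypttab facts above are a plain stat result. A minimal equivalent task, with an illustrative register name:

    - name: Retrieve facts for the /etc/crypttab file (sketch)
      ansible.builtin.stat:
        path: /etc/crypttab
      register: __storage_crypttab_stat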

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:35:17 -0500 (0:00:00.606)       0:04:31.820 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:35:17 -0500 (0:00:00.069)       0:04:31.890 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:151
Saturday 08 February 2025  18:35:19 -0500 (0:00:01.438)       0:04:33.328 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:35:19 -0500 (0:00:00.129)       0:04:33.458 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-14",
                    "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-14",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:35:19 -0500 (0:00:00.102)       0:04:33.560 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:35:19 -0500 (0:00:00.123)       0:04:33.683 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-1fbcc57b21754219bebc63d4defafeb2-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-1fbcc57b21754219bebc63d4defafeb2-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "1fbcc57b-2175-4219-bebc-63d4defafeb2"
        },
        "/dev/mapper/stratis-1-private-380de7e3119b406db6e7a9ad02601388-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-380de7e3119b406db6e7a9ad02601388-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "380de7e3-119b-406d-b6e7-a9ad02601388"
        },
        "/dev/mapper/stratis-1-private-3a4e70f6822249ddb46ceaff91d5c750-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-3a4e70f6822249ddb46ceaff91d5c750-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "3a4e70f6-8222-49dd-b46c-eaff91d5c750"
        },
        "/dev/mapper/stratis-1-private-5f2a642c4e7b4514b6240950fb2a51f2-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-5f2a642c4e7b4514b6240950fb2a51f2-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "5f2a642c-4e7b-4514-b624-0950fb2a51f2"
        },
        "/dev/mapper/stratis-1-private-76e18be4ba774946aab0be32eee04ac9-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-76e18be4ba774946aab0be32eee04ac9-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "76e18be4-ba77-4946-aab0-be32eee04ac9"
        },
        "/dev/mapper/stratis-1-private-778ed417da4b4b969257ec01bb2fe2af-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-778ed417da4b4b969257ec01bb2fe2af-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "778ed417-da4b-4b96-9257-ec01bb2fe2af"
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-811e25ee3cd948869e9c6400552587fb-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-811e25ee3cd948869e9c6400552587fb-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "811e25ee-3cd9-4886-9e9c-6400552587fb"
        },
        "/dev/mapper/stratis-1-private-8b994970ec9e4f8588ac36a7f8ea5e26-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-8b994970ec9e4f8588ac36a7f8ea5e26-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "8b994970-ec9e-4f85-88ac-36a7f8ea5e26"
        },
        "/dev/mapper/stratis-1-private-8e5a880651a84a54ad37c210dc69385c-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-8e5a880651a84a54ad37c210dc69385c-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "8e5a8806-51a8-4a54-ad37-c210dc69385c"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "5669f87b-91f8-47a4-8650-0e743f96bd48"
        },
        "/dev/sdb": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "2561b1d5-0bdf-41f5-b4f0-968f9dc36b4f"
        },
        "/dev/sdc": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "e9204989-c4e6-42bc-aa94-fb863c217dae"
        },
        "/dev/sdd": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "e707e9a1-bafd-44de-8cc8-7c0da6f3e1f3"
        },
        "/dev/sde": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "49e9209b-03ad-410b-a99e-5c9fefc84190"
        },
        "/dev/sdf": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "7e3ceb8f-ddbf-4236-8a41-5d2abe76049f"
        },
        "/dev/sdg": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "e53d8bcc-3be0-4815-9c97-e0d6bc73f26c"
        },
        "/dev/sdh": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "6d017cfb-5917-4e32-92fc-c3bf23587b0e"
        },
        "/dev/sdi": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "365dc560-3450-4cef-93b8-0de5bd41dd39"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "49ecc62e-29ff-42fd-935d-363e3336463a"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
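
The "info" mapping above carries one entry per block device, keyed by device path. Roughly the same data can be gathered with lsblk; a sketch under that assumption (this is not necessarily the helper the test actually uses):

    - name: Collect block device info comparable to the dump above (sketch)
      ansible.builtin.command:
        cmd: lsblk -p -P -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: __storage_lsblk_info
      changed_when: false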

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:35:20 -0500 (0:00:00.543)       0:04:34.227 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003464",
    "end": "2025-02-08 18:35:20.453027",
    "rc": 0,
    "start": "2025-02-08 18:35:20.449563"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=49ecc62e-29ff-42fd-935d-363e3336463a /opt/test1 xfs defaults 0 0
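
The last line above is the entry the role added (note the "# system_role:storage" marker it maintains at the top of the file). A follow-up assertion that the entry is present could look like this sketch; the storage_test_fstab register is illustrative:

    - name: Assert the test1 mount landed in /etc/fstab (sketch)
      ansible.builtin.assert:
        that:
          - storage_test_fstab.stdout is search('UUID=49ecc62e-29ff-42fd-935d-363e3336463a /opt/test1 xfs')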

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:35:20 -0500 (0:00:00.512)       0:04:34.739 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003246",
    "end": "2025-02-08 18:35:20.984401",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:35:20.981155"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:35:21 -0500 (0:00:00.534)       0:04:35.274 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:35:21 -0500 (0:00:00.162)       0:04:35.436 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:35:21 -0500 (0:00:00.088)       0:04:35.525 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:35:21 -0500 (0:00:00.091)       0:04:35.616 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:35:21 -0500 (0:00:00.140)       0:04:35.757 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:35:21 -0500 (0:00:00.259)       0:04:36.017 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:35:21 -0500 (0:00:00.096)       0:04:36.113 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:35:21 -0500 (0:00:00.056)       0:04:36.170 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:35:22 -0500 (0:00:00.149)       0:04:36.320 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:35:22 -0500 (0:00:00.089)       0:04:36.410 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:35:22 -0500 (0:00:00.098)       0:04:36.508 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:35:22 -0500 (0:00:00.098)       0:04:36.606 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:35:22 -0500 (0:00:00.097)       0:04:36.704 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:35:22 -0500 (0:00:00.091)       0:04:36.795 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:35:22 -0500 (0:00:00.092)       0:04:36.888 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:35:23 -0500 (0:00:00.606)       0:04:37.495 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:35:23 -0500 (0:00:00.147)       0:04:37.643 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:35:23 -0500 (0:00:00.199)       0:04:37.842 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:35:23 -0500 (0:00:00.117)       0:04:37.960 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:35:23 -0500 (0:00:00.125)       0:04:38.085 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.091)       0:04:38.177 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.099)       0:04:38.277 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.077)       0:04:38.354 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.089)       0:04:38.444 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.111)       0:04:38.556 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.108)       0:04:38.664 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.087)       0:04:38.752 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.099)       0:04:38.851 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.081)       0:04:38.932 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:35:24 -0500 (0:00:00.154)       0:04:39.086 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:35:25 -0500 (0:00:00.255)       0:04:39.342 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:35:25 -0500 (0:00:00.291)       0:04:39.634 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:35:25 -0500 (0:00:00.107)       0:04:39.742 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:35:25 -0500 (0:00:00.117)       0:04:39.859 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:35:25 -0500 (0:00:00.071)       0:04:39.931 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:35:25 -0500 (0:00:00.041)       0:04:39.972 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:35:25 -0500 (0:00:00.041)       0:04:40.014 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:35:25 -0500 (0:00:00.074)       0:04:40.088 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:35:26 -0500 (0:00:00.168)       0:04:40.257 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:35:26 -0500 (0:00:00.092)       0:04:40.349 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:35:26 -0500 (0:00:00.167)       0:04:40.517 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.378386",
    "end": "2025-02-08 18:35:27.131329",
    "rc": 0,
    "start": "2025-02-08 18:35:26.752943"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sda",
                        "size": "20938752 sectors",
                        "uuid": "8b994970-ec9e-4f85-88ac-36a7f8ea5e26"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdb",
                        "size": "20938752 sectors",
                        "uuid": "811e25ee-3cd9-4886-9e9c-6400552587fb"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdc",
                        "size": "20938752 sectors",
                        "uuid": "380de7e3-119b-406d-b6e7-a9ad02601388"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdd",
                        "size": "2147450880 sectors",
                        "uuid": "5f2a642c-4e7b-4514-b624-0950fb2a51f2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sde",
                        "size": "2147450880 sectors",
                        "uuid": "778ed417-da4b-4b96-9257-ec01bb2fe2af"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdf",
                        "size": "20938752 sectors",
                        "uuid": "1fbcc57b-2175-4219-bebc-63d4defafeb2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdg",
                        "size": "2147450880 sectors",
                        "uuid": "3a4e70f6-8222-49dd-b46c-eaff91d5c750"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdh",
                        "size": "20938752 sectors",
                        "uuid": "76e18be4-ba77-4946-aab0-be32eee04ac9"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdi",
                        "size": "20938752 sectors",
                        "uuid": "8e5a8806-51a8-4a54-ad37-c210dc69385c"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "49ecc62e-29ff-42fd-935d-363e3336463a"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "7ae6fcee-c99a-4e69-bc5e-c933a13591bf"
        }
    ],
    "stopped_pools": []
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:35:27 -0500 (0:00:00.898)       0:04:41.415 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sda",
                                "size": "20938752 sectors",
                                "uuid": "8b994970-ec9e-4f85-88ac-36a7f8ea5e26"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdb",
                                "size": "20938752 sectors",
                                "uuid": "811e25ee-3cd9-4886-9e9c-6400552587fb"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdc",
                                "size": "20938752 sectors",
                                "uuid": "380de7e3-119b-406d-b6e7-a9ad02601388"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdd",
                                "size": "2147450880 sectors",
                                "uuid": "5f2a642c-4e7b-4514-b624-0950fb2a51f2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sde",
                                "size": "2147450880 sectors",
                                "uuid": "778ed417-da4b-4b96-9257-ec01bb2fe2af"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdf",
                                "size": "20938752 sectors",
                                "uuid": "1fbcc57b-2175-4219-bebc-63d4defafeb2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdg",
                                "size": "2147450880 sectors",
                                "uuid": "3a4e70f6-8222-49dd-b46c-eaff91d5c750"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdh",
                                "size": "20938752 sectors",
                                "uuid": "76e18be4-ba77-4946-aab0-be32eee04ac9"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdi",
                                "size": "20938752 sectors",
                                "uuid": "8e5a8806-51a8-4a54-ad37-c210dc69385c"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "49ecc62e-29ff-42fd-935d-363e3336463a"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "7ae6fcee-c99a-4e69-bc5e-c933a13591bf"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}
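
For readers reproducing this check outside the test suite: the two tasks above amount to running the CLI read-only and parsing its JSON output into a fact. A minimal sketch, assuming the variable names visible in this log (the actual verify-pool-stratis.yml may differ in detail):

    - name: Run 'stratis report'
      ansible.builtin.command: stratis report
      register: storage_test_stratis_report
      changed_when: false  # read-only query; matches the "changed": false above

    - name: Get information about Stratis
      ansible.builtin.set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"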

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:35:27 -0500 (0:00:00.124)       0:04:41.540 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
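
The assertion itself is not echoed by the callback; a plausible shape for it, reusing the fact parsed above (a sketch, not the test's literal code):

    - name: Verify that the pool was created
      ansible.builtin.assert:
        that:
          # exactly one pool named after storage_test_pool should exist
          - _stratis_pool_info.pools | selectattr('name', 'eq', storage_test_pool.name) | list | length == 1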

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:35:27 -0500 (0:00:00.128)       0:04:41.668 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:35:27 -0500 (0:00:00.123)       0:04:41.792 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption_clevis_pin == 'tang'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:35:27 -0500 (0:00:00.084)       0:04:41.877 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:35:27 -0500 (0:00:00.076)       0:04:41.953 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:35:27 -0500 (0:00:00.084)       0:04:42.039 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:35:28 -0500 (0:00:00.176)       0:04:42.215 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:35:28 -0500 (0:00:00.115)       0:04:42.330 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
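
The eight includes above follow one pattern: a tasks file per entry in the _storage_volume_tests list set two tasks earlier. A sketch of the dispatch loop, assuming the loop variable name shown in the task title:

    - name: Run test verify for {{ storage_test_volume_subset }}
      ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"
      loop_control:
        loop_var: storage_test_volume_subset

The unrendered {{ storage_test_volume_subset }} in the heading is expected: loop variables are not yet defined when the outer task's name is templated.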

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:35:28 -0500 (0:00:00.553)       0:04:42.884 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:35:28 -0500 (0:00:00.121)       0:04:43.006 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:35:28 -0500 (0:00:00.135)       0:04:43.142 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:35:29 -0500 (0:00:00.081)       0:04:43.224 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
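
This check can be expressed against ansible_facts.mounts using the two facts set just above; a hedged sketch (the suite's actual expression may differ):

    - name: Verify the current mount state by device
      ansible.builtin.assert:
        that:
          # exactly one live mount of the device at the expected mount point
          - >-
            ansible_facts.mounts
            | selectattr('mount', 'eq', storage_test_mount_expected_mount_point)
            | selectattr('device', 'eq', storage_test_device_path)
            | list | length == 1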

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:35:29 -0500 (0:00:00.132)       0:04:43.356 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:35:29 -0500 (0:00:00.143)       0:04:43.500 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:35:29 -0500 (0:00:00.156)       0:04:43.657 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:35:29 -0500 (0:00:00.146)       0:04:43.804 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:35:29 -0500 (0:00:00.182)       0:04:43.986 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:35:29 -0500 (0:00:00.161)       0:04:44.148 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:35:30 -0500 (0:00:00.104)       0:04:44.252 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:35:30 -0500 (0:00:00.133)       0:04:44.386 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=49ecc62e-29ff-42fd-935d-363e3336463a "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
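
The three *_matches lists read like regex_findall results over the fstab contents (note the captured surrounding spaces). A sketch of how such facts could be derived, assuming a storage_test_fstab result holding /etc/fstab (that variable is cleaned up later in this log); the real tests use their own expressions:

    - name: Set some variables for fstab checking
      ansible.builtin.set_fact:
        # _mount_id is "UUID=..." for this volume, per the included item above
        storage_test_fstab_id_matches: "{{ storage_test_fstab.stdout | regex_findall(storage_test_volume._mount_id + ' ') }}"
        storage_test_fstab_mount_point_matches: "{{ storage_test_fstab.stdout | regex_findall(' ' + storage_test_volume.mount_point + ' ') }}"
        storage_test_fstab_expected_id_matches: "{{ 1 if _storage_test_volume_present else 0 }}"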

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:35:30 -0500 (0:00:00.209)       0:04:44.595 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:35:30 -0500 (0:00:00.183)       0:04:44.779 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:35:30 -0500 (0:00:00.218)       0:04:44.997 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:35:31 -0500 (0:00:00.182)       0:04:45.179 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
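
The fingerprint check confirms the role's marker comment is present in the file it manages. A sketch, assuming the marker text (the exact string is an assumption, not quoted in this log):

    - name: Verify fingerprint
      ansible.builtin.assert:
        that:
          # "# system_role:storage" is the assumed fingerprint string
          - storage_test_fstab.stdout is search('# system_role:storage')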

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:35:31 -0500 (0:00:00.309)       0:04:45.489 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:35:31 -0500 (0:00:00.100)       0:04:45.590 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:35:31 -0500 (0:00:00.086)       0:04:45.677 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:35:31 -0500 (0:00:00.083)       0:04:45.760 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057711.9881754,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1739057711.9881754,
        "dev": 6,
        "device_type": 64782,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 8447,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1739057711.9881754,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
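
The device-node check is a plain stat on the volume's _device path, following the /dev/stratis symlink down to the dm node. A minimal sketch, with a register name assumed for illustration:

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: "{{ storage_test_volume._device }}"
        follow: true  # resolve the /dev/stratis/foo/test1 symlink
      register: storage_test_dev

The next task can then assert storage_test_dev.stat.exists (and, for a present volume, storage_test_dev.stat.isblk).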

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:35:32 -0500 (0:00:00.487)       0:04:46.247 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:35:32 -0500 (0:00:00.054)       0:04:46.302 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:35:32 -0500 (0:00:00.048)       0:04:46.350 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:35:32 -0500 (0:00:00.052)       0:04:46.403 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:35:32 -0500 (0:00:00.057)       0:04:46.460 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:35:32 -0500 (0:00:00.048)       0:04:46.508 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:35:32 -0500 (0:00:00.052)       0:04:46.561 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:35:32 -0500 (0:00:00.047)       0:04:46.608 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:35:33 -0500 (0:00:01.447)       0:04:48.055 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:35:33 -0500 (0:00:00.081)       0:04:48.137 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.060)       0:04:48.197 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.094)       0:04:48.291 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.068)       0:04:48.359 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.058)       0:04:48.418 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.079)       0:04:48.497 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.073)       0:04:48.571 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.063)       0:04:48.635 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}
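
For an unencrypted stratis volume the expected crypttab bookkeeping collapses to zero entries and the default "-" key file. One way to derive those values (a sketch inferred from the facts shown, not the test's literal code):

    - name: Set test variables
      ansible.builtin.set_fact:
        # one entry is expected only for a present, encrypted volume
        _storage_test_expected_crypttab_entries: "{{ 1 if storage_test_volume.encryption and _storage_test_volume_present else 0 }}"
        _storage_test_expected_crypttab_key_file: "{{ storage_test_volume.encryption_key | default('-', true) }}"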

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.079)       0:04:48.715 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.083)       0:04:48.798 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.096)       0:04:48.895 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:35:34 -0500 (0:00:00.126)       0:04:49.021 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.194)       0:04:49.216 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.111)       0:04:49.327 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.106)       0:04:49.433 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.098)       0:04:49.532 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.098)       0:04:49.631 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.061)       0:04:49.692 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.055)       0:04:49.748 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.063)       0:04:49.812 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.055)       0:04:49.868 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.075)       0:04:49.943 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.057)       0:04:50.001 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:35:35 -0500 (0:00:00.062)       0:04:50.063 ***** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
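
The bytes/lvm/parted/size fields point to a size-conversion helper shipped with the tests. Core Ansible can reproduce the byte count alone with the human_to_bytes filter; a sketch (not the suite's actual helper module):

    - name: Parse the actual size of the volume (byte count only)
      ansible.builtin.set_fact:
        storage_test_actual_bytes: "{{ storage_test_volume.size | human_to_bytes }}"

With size "4g" this yields 4294967296, matching the output above.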

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:35:36 -0500 (0:00:00.470)       0:04:50.533 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:35:36 -0500 (0:00:00.091)       0:04:50.625 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:35:36 -0500 (0:00:00.064)       0:04:50.690 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:35:36 -0500 (0:00:00.065)       0:04:50.755 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:35:36 -0500 (0:00:00.068)       0:04:50.823 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:35:36 -0500 (0:00:00.094)       0:04:50.918 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:35:36 -0500 (0:00:00.102)       0:04:51.021 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.288)       0:04:51.309 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.095)       0:04:51.404 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.052)       0:04:51.457 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.047)       0:04:51.504 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.052)       0:04:51.557 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.048)       0:04:51.605 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.047)       0:04:51.652 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.047)       0:04:51.700 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.053)       0:04:51.753 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.047)       0:04:51.801 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.047)       0:04:51.848 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.048)       0:04:51.896 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.053)       0:04:51.950 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.048)       0:04:51.998 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.051)       0:04:52.050 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:35:37 -0500 (0:00:00.084)       0:04:52.134 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.109)       0:04:52.244 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.074)       0:04:52.319 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.083)       0:04:52.403 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.065)       0:04:52.468 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.052)       0:04:52.521 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.052)       0:04:52.574 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.081)       0:04:52.655 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.128)       0:04:52.784 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.100)       0:04:52.884 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.069)       0:04:52.953 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.060)       0:04:53.014 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.060)       0:04:53.075 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.042)       0:04:53.117 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Repeat the previous invocation to verify idempotence] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:154
Saturday 08 February 2025  18:35:38 -0500 (0:00:00.049)       0:04:53.167 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.087)       0:04:53.255 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.077)       0:04:53.332 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.070)       0:04:53.402 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
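
The one "ok" and three skips above reflect the role's layered vars lookup: it tries an OS-family file, then a distribution file, then per-version files, including only those that exist on disk (hence the "__vars_file is file" conditions). A minimal sketch of that pattern, with the path construction and loop items as assumptions reconstructed from the items logged above:

- name: Set platform/version specific variables (sketch of the pattern)
  ansible.builtin.include_vars: "{{ __vars_file }}"
  vars:
    # Hypothetical path construction; the real role builds its candidate list dynamically.
    __vars_file: "{{ role_path }}/vars/{{ item }}"
  loop:
    - RedHat.yml                                                                  # OS family
    - "{{ ansible_distribution }}.yml"                                            # e.g. Fedora.yml
    - "{{ ansible_distribution }}_{{ ansible_distribution_major_version }}.yml"   # e.g. Fedora_41.yml
  when: __vars_file is file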

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.129)       0:04:53.532 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.052)       0:04:53.585 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.048)       0:04:53.634 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.056)       0:04:53.690 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.075)       0:04:53.766 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.194)       0:04:53.960 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.080)       0:04:54.041 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_password": "yabbadabbadoo",
            "name": "foo",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g"
                }
            ]
        }
    ]
}
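
For reference, the pool specification echoed by this debug task corresponds to role input of roughly the following shape (reconstructed from the displayed value; the password is the test's dummy secret as shown in the log):

storage_pools:
  - name: foo
    type: stratis
    encryption: true
    encryption_password: yabbadabbadoo   # dummy test value, visible in plain text above
    disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
    volumes:
      - name: test1
        size: 4g
        mount_point: /opt/test1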

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:35:39 -0500 (0:00:00.120)       0:04:54.162 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:35:40 -0500 (0:00:00.221)       0:04:54.383 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:35:40 -0500 (0:00:00.124)       0:04:54.508 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:35:40 -0500 (0:00:00.113)       0:04:54.622 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:35:40 -0500 (0:00:00.078)       0:04:54.700 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:35:40 -0500 (0:00:00.081)       0:04:54.781 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:35:40 -0500 (0:00:00.098)       0:04:54.880 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:35:40 -0500 (0:00:00.064)       0:04:54.944 ***** 
ok: [managed-node3] => {
    "actions": [],
    "changed": false,
    "crypts": [],
    "leaves": [
        "/dev/stratis/foo/test1",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "dump": 0,
            "fstype": "xfs",
            "group": null,
            "mode": null,
            "opts": "defaults",
            "owner": null,
            "passno": 0,
            "path": "/opt/test1",
            "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
            "state": "mounted"
        }
    ],
    "packages": [
        "stratis-cli",
        "e2fsprogs",
        "stratisd",
        "xfsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-14",
                    "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-14",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
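
Note that this second invocation reports "actions": [] and "changed": false, which is exactly what the idempotence re-run is meant to demonstrate. A hypothetical assertion against the registered result (blivet_output is the variable name the role uses, as seen in the conditions below; this exact task is not part of the test):

- name: Assert the rerun made no changes (hypothetical check)
  ansible.builtin.assert:
    that:
      - not blivet_output.changed
      - blivet_output.actions | length == 0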

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:35:46 -0500 (0:00:05.228)       0:05:00.173 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:35:46 -0500 (0:00:00.170)       0:05:00.343 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057715.5311906,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "6c25c73e03fb6ff497a9989abb1d9de3783ab2d1",
        "ctime": 1739057715.5301905,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057715.5301905,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:35:46 -0500 (0:00:00.626)       0:05:00.969 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output is changed",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:35:46 -0500 (0:00:00.083)       0:05:01.053 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:35:46 -0500 (0:00:00.064)       0:05:01.117 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [],
        "changed": false,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/stratis/foo/test1",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "dump": 0,
                "fstype": "xfs",
                "group": null,
                "mode": null,
                "opts": "defaults",
                "owner": null,
                "passno": 0,
                "path": "/opt/test1",
                "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                "state": "mounted"
            }
        ],
        "packages": [
            "stratis-cli",
            "e2fsprogs",
            "stratisd",
            "xfsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-14",
                        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-14",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:35:47 -0500 (0:00:00.165)       0:05:01.283 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_kernel_device": "/dev/dm-14",
                        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "_raw_kernel_device": "/dev/dm-14",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "present",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:35:47 -0500 (0:00:00.148)       0:05:01.432 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:35:47 -0500 (0:00:00.128)       0:05:01.561 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:35:47 -0500 (0:00:00.123)       0:05:01.687 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:35:48 -0500 (0:00:01.103)       0:05:02.790 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
ok: [managed-node3] => (item={'src': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": false,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "state": "mounted"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a"
}
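
This mount entry is applied via ansible.posix.mount (note the module redirects logged just before the task). A standalone equivalent of the single loop item shown above would look roughly like this, with all values taken from the logged mount_info:

- name: Mount the Stratis filesystem by UUID (sketch of the applied item)
  ansible.posix.mount:
    src: UUID=49ecc62e-29ff-42fd-935d-363e3336463a
    path: /opt/test1
    fstype: xfs
    opts: defaults
    state: mounted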

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:35:49 -0500 (0:00:00.704)       0:05:03.495 ***** 
skipping: [managed-node3] => (item={'src': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None})  => {
    "ansible_loop_var": "mount_info",
    "changed": false,
    "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none",
    "mount_info": {
        "dump": 0,
        "fstype": "xfs",
        "group": null,
        "mode": null,
        "opts": "defaults",
        "owner": null,
        "passno": 0,
        "path": "/opt/test1",
        "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "state": "mounted"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:35:49 -0500 (0:00:00.171)       0:05:03.666 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:35:50 -0500 (0:00:01.092)       0:05:04.759 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:35:51 -0500 (0:00:00.650)       0:05:05.409 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:35:51 -0500 (0:00:00.107)       0:05:05.517 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:169
Saturday 08 February 2025  18:35:52 -0500 (0:00:01.265)       0:05:06.783 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:35:52 -0500 (0:00:00.163)       0:05:06.947 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_kernel_device": "/dev/dm-14",
                    "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "_raw_kernel_device": "/dev/dm-14",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "present",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:35:52 -0500 (0:00:00.128)       0:05:07.075 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:35:53 -0500 (0:00:00.114)       0:05:07.190 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-1fbcc57b21754219bebc63d4defafeb2-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-1fbcc57b21754219bebc63d4defafeb2-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "1fbcc57b-2175-4219-bebc-63d4defafeb2"
        },
        "/dev/mapper/stratis-1-private-380de7e3119b406db6e7a9ad02601388-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-380de7e3119b406db6e7a9ad02601388-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "380de7e3-119b-406d-b6e7-a9ad02601388"
        },
        "/dev/mapper/stratis-1-private-3a4e70f6822249ddb46ceaff91d5c750-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-3a4e70f6822249ddb46ceaff91d5c750-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "3a4e70f6-8222-49dd-b46c-eaff91d5c750"
        },
        "/dev/mapper/stratis-1-private-5f2a642c4e7b4514b6240950fb2a51f2-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-5f2a642c4e7b4514b6240950fb2a51f2-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "5f2a642c-4e7b-4514-b624-0950fb2a51f2"
        },
        "/dev/mapper/stratis-1-private-76e18be4ba774946aab0be32eee04ac9-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-76e18be4ba774946aab0be32eee04ac9-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "76e18be4-ba77-4946-aab0-be32eee04ac9"
        },
        "/dev/mapper/stratis-1-private-778ed417da4b4b969257ec01bb2fe2af-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-778ed417da4b4b969257ec01bb2fe2af-crypt",
            "size": "1024G",
            "type": "crypt",
            "uuid": "778ed417-da4b-4b96-9257-ec01bb2fe2af"
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-thindata",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-flex-thinmeta",
            "size": "799M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-physical-originsub",
            "size": "52.1G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-7ae6fceec99a4e69bc5ec933a13591bf-thinpool-pool",
            "size": "50G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-811e25ee3cd948869e9c6400552587fb-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-811e25ee3cd948869e9c6400552587fb-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "811e25ee-3cd9-4886-9e9c-6400552587fb"
        },
        "/dev/mapper/stratis-1-private-8b994970ec9e4f8588ac36a7f8ea5e26-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-8b994970ec9e4f8588ac36a7f8ea5e26-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "8b994970-ec9e-4f85-88ac-36a7f8ea5e26"
        },
        "/dev/mapper/stratis-1-private-8e5a880651a84a54ad37c210dc69385c-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-8e5a880651a84a54ad37c210dc69385c-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "8e5a8806-51a8-4a54-ad37-c210dc69385c"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "5669f87b-91f8-47a4-8650-0e743f96bd48"
        },
        "/dev/sdb": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "2561b1d5-0bdf-41f5-b4f0-968f9dc36b4f"
        },
        "/dev/sdc": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": "e9204989-c4e6-42bc-aa94-fb863c217dae"
        },
        "/dev/sdd": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": "e707e9a1-bafd-44de-8cc8-7c0da6f3e1f3"
        },
        "/dev/sde": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": "49e9209b-03ad-410b-a99e-5c9fefc84190"
        },
        "/dev/sdf": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": "7e3ceb8f-ddbf-4236-8a41-5d2abe76049f"
        },
        "/dev/sdg": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": "e53d8bcc-3be0-4815-9c97-e0d6bc73f26c"
        },
        "/dev/sdh": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": "6d017cfb-5917-4e32-92fc-c3bf23587b0e"
        },
        "/dev/sdi": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": "365dc560-3450-4cef-93b8-0de5bd41dd39"
        },
        "/dev/stratis/foo/test1": {
            "fstype": "xfs",
            "label": "",
            "mountpoint": "/opt/test1",
            "name": "/dev/stratis/foo/test1",
            "size": "4G",
            "type": "stratis",
            "uuid": "49ecc62e-29ff-42fd-935d-363e3336463a"
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
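
Each entry in this info map carries lsblk-style fields (name, size, type, fstype, uuid, mountpoint) that later verification tasks compare against the pool specification. As a hypothetical follow-up, the entry for the Stratis filesystem could be pulled out like this (assuming the result is registered under storage_test_blkinfo, the fact name the variable-cleanup task uses):

- name: Show the collected info for the test volume (hypothetical)
  ansible.builtin.debug:
    msg: "{{ storage_test_blkinfo.info['/dev/stratis/foo/test1'] }}"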

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:35:53 -0500 (0:00:00.504)       0:05:07.695 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.004976",
    "end": "2025-02-08 18:35:54.899283",
    "rc": 0,
    "start": "2025-02-08 18:35:53.894307"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
UUID=49ecc62e-29ff-42fd-935d-363e3336463a /opt/test1 xfs defaults 0 0
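
The last line confirms the role-managed mount survived the rerun. A hedged sketch of how a verification step could match it against the expected _mount_id, assuming the cat output is registered as storage_test_fstab (the fact name suggested by the earlier variable-cleanup task):

- name: Assert the volume's fstab entry is present (hypothetical)
  ansible.builtin.assert:
    that:
      - storage_test_fstab.stdout is search('UUID=49ecc62e-29ff-42fd-935d-363e3336463a /opt/test1')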

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:35:55 -0500 (0:00:01.486)       0:05:09.181 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003441",
    "end": "2025-02-08 18:35:55.408865",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:35:55.405424"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:35:55 -0500 (0:00:00.504)       0:05:09.685 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:35:55 -0500 (0:00:00.178)       0:05:09.864 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}
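
The _storage_pool_tests list set here drives the "Verify pool subset" task below, which includes one task file per subset (test-verify-pool-members.yml and test-verify-pool-volumes.yml, as the include lines show). The pattern is roughly the following; the loop variable name is an assumption:

- name: Verify pool subset (sketch of the include pattern)
  ansible.builtin.include_tasks: "test-verify-pool-{{ storage_test_pool_subset }}.yml"
  loop: "{{ _storage_pool_tests }}"
  loop_control:
    loop_var: storage_test_pool_subset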

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:35:55 -0500 (0:00:00.084)       0:05:09.949 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:35:55 -0500 (0:00:00.110)       0:05:10.059 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:35:56 -0500 (0:00:00.158)       0:05:10.218 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:35:56 -0500 (0:00:00.319)       0:05:10.537 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:35:56 -0500 (0:00:00.166)       0:05:10.703 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:35:56 -0500 (0:00:00.078)       0:05:10.782 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:35:56 -0500 (0:00:00.123)       0:05:10.906 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:35:56 -0500 (0:00:00.118)       0:05:11.025 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:35:56 -0500 (0:00:00.084)       0:05:11.109 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:35:57 -0500 (0:00:00.241)       0:05:11.351 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:35:57 -0500 (0:00:00.132)       0:05:11.483 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:35:57 -0500 (0:00:00.120)       0:05:11.603 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:35:57 -0500 (0:00:00.143)       0:05:11.747 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.46.217 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/3f058d2ae1'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.46.217 closed.
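
The probe above exits 0 and cannot fail the play, which is the usual shape of a capability check: run a short interpreter one-liner and tolerate a nonzero exit. A minimal sketch of that pattern follows; the module path, inspected attribute, and register name are assumptions for illustration, not the test's actual probe:

    # Hypothetical capability probe: exit 0 if blivet exposes grow-to-fill support.
    - name: Check that blivet supports PV grow to fill
      command: >-
        python3 -c 'import sys; from blivet.formats.lvmpv import LVMPhysicalVolume;
        sys.exit(0 if hasattr(LVMPhysicalVolume, "grow_to_fill") else 1)'
      register: storage_test_grow_supported   # hypothetical name
      failed_when: false                      # matches failed_when_result above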


TASK [Verify that PVs fill their whole devices when they should] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:35:58 -0500 (0:00:00.648)       0:05:12.395 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:35:58 -0500 (0:00:00.144)       0:05:12.540 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:35:58 -0500 (0:00:00.185)       0:05:12.726 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:35:58 -0500 (0:00:00.127)       0:05:12.853 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:35:58 -0500 (0:00:00.126)       0:05:12.980 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:35:58 -0500 (0:00:00.110)       0:05:13.090 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:35:59 -0500 (0:00:00.085)       0:05:13.176 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:35:59 -0500 (0:00:00.120)       0:05:13.296 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:35:59 -0500 (0:00:00.094)       0:05:13.391 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:35:59 -0500 (0:00:00.094)       0:05:13.485 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:35:59 -0500 (0:00:00.089)       0:05:13.574 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:35:59 -0500 (0:00:00.077)       0:05:13.651 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:35:59 -0500 (0:00:00.086)       0:05:13.738 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}
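
These resets are plain set_fact calls that null the per-pool regex facts so stale values cannot leak into the next pool's verification; the equivalent YAML, reconstructed from the output above:

    - name: Reset variables used by tests
      set_fact:
        # Null each regex so the next verified pool starts clean.
        storage_test_md_active_devices_re: null
        storage_test_md_chunk_size_re: null
        storage_test_md_metadata_version_re: null
        storage_test_md_spare_devices_re: null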

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:35:59 -0500 (0:00:00.132)       0:05:13.871 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:35:59 -0500 (0:00:00.223)       0:05:14.094 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:36:00 -0500 (0:00:00.090)       0:05:14.185 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:36:00 -0500 (0:00:00.208)       0:05:14.393 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:36:00 -0500 (0:00:00.132)       0:05:14.525 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:36:00 -0500 (0:00:00.244)       0:05:14.769 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:36:00 -0500 (0:00:00.140)       0:05:14.910 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:36:00 -0500 (0:00:00.053)       0:05:14.964 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:36:00 -0500 (0:00:00.049)       0:05:15.013 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:36:00 -0500 (0:00:00.090)       0:05:15.104 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:36:01 -0500 (0:00:00.176)       0:05:15.281 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_kernel_device": "/dev/dm-14",
        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "_raw_device": "/dev/stratis/foo/test1",
        "_raw_kernel_device": "/dev/dm-14",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "present",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:36:01 -0500 (0:00:00.132)       0:05:15.413 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:36:01 -0500 (0:00:00.289)       0:05:15.703 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.381207",
    "end": "2025-02-08 18:36:02.392961",
    "rc": 0,
    "start": "2025-02-08 18:36:02.011754"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sda",
                        "size": "20938752 sectors",
                        "uuid": "8b994970-ec9e-4f85-88ac-36a7f8ea5e26"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdb",
                        "size": "20938752 sectors",
                        "uuid": "811e25ee-3cd9-4886-9e9c-6400552587fb"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdc",
                        "size": "20938752 sectors",
                        "uuid": "380de7e3-119b-406d-b6e7-a9ad02601388"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdd",
                        "size": "2147450880 sectors",
                        "uuid": "5f2a642c-4e7b-4514-b624-0950fb2a51f2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sde",
                        "size": "2147450880 sectors",
                        "uuid": "778ed417-da4b-4b96-9257-ec01bb2fe2af"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdf",
                        "size": "20938752 sectors",
                        "uuid": "1fbcc57b-2175-4219-bebc-63d4defafeb2"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdg",
                        "size": "2147450880 sectors",
                        "uuid": "3a4e70f6-8222-49dd-b46c-eaff91d5c750"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdh",
                        "size": "20938752 sectors",
                        "uuid": "76e18be4-ba77-4946-aab0-be32eee04ac9"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "in_use": false,
                        "key_description": "blivet-foo",
                        "path": "/dev/sdi",
                        "size": "20938752 sectors",
                        "uuid": "8e5a8806-51a8-4a54-ad37-c210dc69385c"
                    }
                ]
            },
            "filesystems": [
                {
                    "name": "test1",
                    "origin": "Not set",
                    "size": "8388608 sectors",
                    "size_limit": "Not set",
                    "used": "72351744 bytes",
                    "uuid": "49ecc62e-29ff-42fd-935d-363e3336463a"
                }
            ],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "7ae6fcee-c99a-4e69-bc5e-c933a13591bf"
        }
    ],
    "stopped_pools": []
}
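
For reference, the task that produced this report is a plain command invocation. A minimal reconstruction; the register name is taken from the later "Reset variable used by test" task, and changed_when is an assumption:

    - name: Run 'stratis report'
      command: stratis report          # read-only query, shown in "cmd" above
      register: storage_test_stratis_report
      changed_when: false              # assumption: reporting changes no state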

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:36:02 -0500 (0:00:00.976)       0:05:16.680 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sda",
                                "size": "20938752 sectors",
                                "uuid": "8b994970-ec9e-4f85-88ac-36a7f8ea5e26"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdb",
                                "size": "20938752 sectors",
                                "uuid": "811e25ee-3cd9-4886-9e9c-6400552587fb"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdc",
                                "size": "20938752 sectors",
                                "uuid": "380de7e3-119b-406d-b6e7-a9ad02601388"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdd",
                                "size": "2147450880 sectors",
                                "uuid": "5f2a642c-4e7b-4514-b624-0950fb2a51f2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sde",
                                "size": "2147450880 sectors",
                                "uuid": "778ed417-da4b-4b96-9257-ec01bb2fe2af"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdf",
                                "size": "20938752 sectors",
                                "uuid": "1fbcc57b-2175-4219-bebc-63d4defafeb2"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdg",
                                "size": "2147450880 sectors",
                                "uuid": "3a4e70f6-8222-49dd-b46c-eaff91d5c750"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdh",
                                "size": "20938752 sectors",
                                "uuid": "76e18be4-ba77-4946-aab0-be32eee04ac9"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "in_use": false,
                                "key_description": "blivet-foo",
                                "path": "/dev/sdi",
                                "size": "20938752 sectors",
                                "uuid": "8e5a8806-51a8-4a54-ad37-c210dc69385c"
                            }
                        ]
                    },
                    "filesystems": [
                        {
                            "name": "test1",
                            "origin": "Not set",
                            "size": "8388608 sectors",
                            "size_limit": "Not set",
                            "used": "72351744 bytes",
                            "uuid": "49ecc62e-29ff-42fd-935d-363e3336463a"
                        }
                    ],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "7ae6fcee-c99a-4e69-bc5e-c933a13591bf"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}
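
The _stratis_pool_info fact is simply the report's stdout parsed as JSON; a likely equivalent, assuming the register name from the previous task:

    - name: Get information about Stratis
      set_fact:
        # Parse the registered 'stratis report' output into a dict.
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"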

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:36:02 -0500 (0:00:00.103)       0:05:16.783 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
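
One plausible shape for this assertion, matching the requested pool by name in the parsed report; the exact condition in the test file may differ:

    - name: Verify that the pool was created
      assert:
        that:
          # Exactly one pool named after the requested pool should exist.
          - _stratis_pool_info.pools | selectattr('name', 'equalto', storage_test_pool.name) | list | length == 1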

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:36:02 -0500 (0:00:00.159)       0:05:16.942 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:36:02 -0500 (0:00:00.111)       0:05:17.054 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption_clevis_pin == 'tang'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:36:03 -0500 (0:00:00.119)       0:05:17.173 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:36:03 -0500 (0:00:00.111)       0:05:17.285 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:36:03 -0500 (0:00:00.246)       0:05:17.532 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', '_kernel_device': '/dev/dm-14', '_raw_kernel_device': '/dev/dm-14'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:36:03 -0500 (0:00:00.164)       0:05:17.696 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": true,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:36:03 -0500 (0:00:00.105)       0:05:17.801 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)
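
The eight includes above come from looping a file-name template over the subset list set in "Set storage volume test variables"; a reconstruction consistent with the unrendered loop variable in the task title:

    - name: Run test verify for {{ storage_test_volume_subset }}
      include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
      loop: "{{ _storage_volume_tests }}"     # mount, fstab, fs, device, ...
      loop_control:
        loop_var: storage_test_volume_subset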

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.390)       0:05:18.192 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.089)       0:05:18.282 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.228)       0:05:18.511 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.088)       0:05:18.599 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.100)       0:05:18.699 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.079)       0:05:18.779 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.086)       0:05:18.865 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.080)       0:05:18.945 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.077)       0:05:19.023 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:36:04 -0500 (0:00:00.100)       0:05:19.124 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:36:05 -0500 (0:00:00.085)       0:05:19.209 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:36:05 -0500 (0:00:00.086)       0:05:19.296 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "1",
        "storage_test_fstab_expected_mount_options_matches": "1",
        "storage_test_fstab_expected_mount_point_matches": "1",
        "storage_test_fstab_id_matches": [
            "UUID=49ecc62e-29ff-42fd-935d-363e3336463a "
        ],
        "storage_test_fstab_mount_options_matches": [
            " /opt/test1 xfs defaults "
        ],
        "storage_test_fstab_mount_point_matches": [
            " /opt/test1 "
        ]
    },
    "changed": false
}
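
The match lists above have the shape of regex_findall results over the text of /etc/fstab: one hit for the device identifier, one for the mount point, one for the point/fstype/options run. A hypothetical reconstruction of the first two facts; the variable holding the fstab contents is assumed:

    - name: Set some variables for fstab checking
      set_fact:
        # __fstab_content is a hypothetical variable holding /etc/fstab text.
        storage_test_fstab_id_matches: "{{ __fstab_content | regex_findall(storage_test_volume._mount_id + ' ') }}"
        storage_test_fstab_mount_point_matches: "{{ __fstab_content | regex_findall(' ' + storage_test_volume.mount_point + ' ') }}"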

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:36:05 -0500 (0:00:00.137)       0:05:19.433 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:36:05 -0500 (0:00:00.110)       0:05:19.543 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:36:05 -0500 (0:00:00.155)       0:05:19.698 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:36:05 -0500 (0:00:00.121)       0:05:19.820 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:36:05 -0500 (0:00:00.131)       0:05:19.951 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:36:05 -0500 (0:00:00.085)       0:05:20.036 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:36:05 -0500 (0:00:00.084)       0:05:20.121 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:36:06 -0500 (0:00:00.102)       0:05:20.224 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057711.9881754,
        "attr_flags": "",
        "attributes": [],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "ctime": 1739057711.9881754,
        "dev": 6,
        "device_type": 64782,
        "executable": false,
        "exists": true,
        "gid": 6,
        "gr_name": "disk",
        "inode": 8447,
        "isblk": true,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": false,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/symlink",
        "mode": "0660",
        "mtime": 1739057711.9881754,
        "nlink": 1,
        "path": "/dev/stratis/foo/test1",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": null,
        "wgrp": true,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
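
This stat result (a followed symlink under /dev/stratis resolving to a block device) would come from a task like the following; the register name is hypothetical, and follow is inferred from isblk being true while islnk is false:

    - name: See whether the device node is present
      stat:
        path: "{{ storage_test_volume._device }}"   # /dev/stratis/foo/test1 here
        follow: true
      register: storage_test_dev                    # hypothetical name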

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:36:06 -0500 (0:00:00.550)       0:05:20.774 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:36:06 -0500 (0:00:00.093)       0:05:20.867 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:36:06 -0500 (0:00:00.103)       0:05:20.971 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:36:06 -0500 (0:00:00.092)       0:05:21.063 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:36:06 -0500 (0:00:00.105)       0:05:21.168 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:36:07 -0500 (0:00:00.130)       0:05:21.299 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:36:07 -0500 (0:00:00.154)       0:05:21.453 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:36:07 -0500 (0:00:00.081)       0:05:21.535 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
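
The "Nothing to do" result with the lsrpackages marker is the usual shape of a package-module call when the package is already installed; a minimal equivalent:

    - name: Ensure cryptsetup is present
      package:
        name: cryptsetup
        state: present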

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:36:08 -0500 (0:00:01.448)       0:05:22.983 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:36:08 -0500 (0:00:00.048)       0:05:23.032 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:36:08 -0500 (0:00:00.052)       0:05:23.084 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:36:08 -0500 (0:00:00.068)       0:05:23.153 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:36:09 -0500 (0:00:00.049)       0:05:23.202 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:36:09 -0500 (0:00:00.057)       0:05:23.260 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:36:09 -0500 (0:00:00.108)       0:05:23.369 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:36:09 -0500 (0:00:00.134)       0:05:23.504 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:36:09 -0500 (0:00:00.092)       0:05:23.597 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}
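
The three facts set here drive the crypttab checks that follow: zero entries are expected because the volume is unencrypted. A rough reconstruction of how such facts could be derived, assuming storage_test_crypttab holds the registered contents of /etc/crypttab (that variable is nulled later in this log); the select('search', ...) filter chain is an assumption, not the role's exact code:

    - name: Set test variables
      ansible.builtin.set_fact:
        _storage_test_crypttab_entries: "{{ storage_test_crypttab.stdout_lines | default([])
          | select('search', storage_test_volume._device | basename) | list }}"
        _storage_test_expected_crypttab_entries: "{{ '1' if storage_test_volume.encryption else '0' }}"
        _storage_test_expected_crypttab_key_file: "{{ storage_test_volume.encryption_key | default('-') }}"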

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:36:09 -0500 (0:00:00.146)       0:05:23.743 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:36:09 -0500 (0:00:00.155)       0:05:23.899 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:36:10 -0500 (0:00:00.313)       0:05:24.212 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:36:10 -0500 (0:00:00.097)       0:05:24.309 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:36:10 -0500 (0:00:00.096)       0:05:24.405 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:36:10 -0500 (0:00:00.147)       0:05:24.553 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:36:10 -0500 (0:00:00.146)       0:05:24.699 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:36:10 -0500 (0:00:00.130)       0:05:24.830 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:36:10 -0500 (0:00:00.094)       0:05:24.924 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:36:10 -0500 (0:00:00.081)       0:05:25.005 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:36:10 -0500 (0:00:00.095)       0:05:25.100 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:36:11 -0500 (0:00:00.082)       0:05:25.183 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:36:11 -0500 (0:00:00.080)       0:05:25.264 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:36:11 -0500 (0:00:00.079)       0:05:25.343 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:36:11 -0500 (0:00:00.087)       0:05:25.430 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}
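
All ten RAID verification tasks above skip for the same reason: the volume under test is a stratis volume, and each task carries the same type guard. A minimal sketch of the guard pattern, with mdadm assumed as the information source for illustration:

    - name: Get information about RAID
      ansible.builtin.command: mdadm --detail {{ storage_test_volume._device }}
      register: storage_test_mdadm
      changed_when: false
      when: storage_test_volume.type == 'raid'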

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:36:11 -0500 (0:00:00.080)       0:05:25.511 ***** 
ok: [managed-node3] => {
    "bytes": 4294967296,
    "changed": false,
    "lvm": "4g",
    "parted": "4GiB",
    "size": "4 GiB"
}
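
The bytes/lvm/parted/size fields come from a size-parsing helper shipped with the tests. For the raw byte count alone, the builtin human_to_bytes filter yields the same figure from the lvm-style string; a minimal sketch:

    - name: Parse the actual size of the volume
      ansible.builtin.set_fact:
        storage_test_actual_bytes: "{{ '4g' | human_to_bytes }}"  # 4 * 2**30 = 4294967296 (1024-based)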

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:36:11 -0500 (0:00:00.541)       0:05:26.052 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:36:11 -0500 (0:00:00.079)       0:05:26.132 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.111)       0:05:26.243 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
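
Note that debug does not fail on an undefined variable; it prints the "VARIABLE IS NOT DEFINED!" string and the play continues. When the undefined case is expected, as it is here for a non-LVM volume, a default makes the output explicit; a small sketch:

    - name: Show expected size
      ansible.builtin.debug:
        msg: "{{ storage_test_expected_size | default('undefined (volume is not LVM)') }}"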

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.061)       0:05:26.305 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.094)       0:05:26.399 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.079)       0:05:26.478 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.084)       0:05:26.563 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.type == \"lvm\""
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.060)       0:05:26.624 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.079)       0:05:26.703 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.046)       0:05:26.750 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.052)       0:05:26.802 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.048)       0:05:26.850 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.047)       0:05:26.898 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.047)       0:05:26.945 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.050)       0:05:26.996 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.047)       0:05:27.043 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.047)       0:05:27.090 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:36:12 -0500 (0:00:00.046)       0:05:27.137 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.051)       0:05:27.189 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.048)       0:05:27.237 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.046)       0:05:27.284 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.047)       0:05:27.332 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.052)       0:05:27.384 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.048)       0:05:27.432 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "bytes": 4294967296,
        "changed": false,
        "failed": false,
        "lvm": "4g",
        "parted": "4GiB",
        "size": "4 GiB"
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.051)       0:05:27.483 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.050)       0:05:27.534 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"lvm\"",
    "skip_reason": "Conditional result was False"
}
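
The final size assertion is also guarded by the LVM type check, so it skips for this stratis volume. Had it run, it would compare the parsed actual size against the computed expectation; a hedged sketch (the 1% tolerance is an assumption for illustration, not necessarily the test's exact margin):

    - name: Assert expected size is actual size
      ansible.builtin.assert:
        that:
          - (storage_test_expected_size | int - storage_test_actual_size.bytes) | abs
            <= (storage_test_expected_size | int * 0.01)
        fail_msg: >-
          actual size {{ storage_test_actual_size.bytes }} does not match
          expected size {{ storage_test_expected_size }}
      when: storage_test_volume.type == "lvm"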

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.074)       0:05:27.609 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.048)       0:05:27.657 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.068)       0:05:27.726 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.047)       0:05:27.773 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.119)       0:05:27.892 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.048)       0:05:27.941 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.048)       0:05:27.990 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.047)       0:05:28.037 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.056)       0:05:28.093 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:36:13 -0500 (0:00:00.037)       0:05:28.131 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:172
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.049)       0:05:28.181 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.100)       0:05:28.282 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.079)       0:05:28.361 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.098)       0:05:28.459 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
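
The loop above is the standard system-roles pattern for layering platform vars: each candidate filename is included only if it exists, per the `__vars_file is file` guard shown in the skip output. On Fedora 41 the major-version and full-version candidates both render to Fedora_41.yml, which is why that item is attempted, and skipped, twice. A sketch of the pattern (variable names approximate the role's):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __vars_file }}"
      loop:
        - RedHat.yml
        - "{{ ansible_facts['distribution'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_major_version'] }}.yml"
        - "{{ ansible_facts['distribution'] }}_{{ ansible_facts['distribution_version'] }}.yml"
      vars:
        __vars_file: "{{ role_path }}/vars/{{ item }}"
      when: __vars_file is file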

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.159)       0:05:28.618 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.053)       0:05:28.672 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.069)       0:05:28.741 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.058)       0:05:28.800 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.053)       0:05:28.853 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.093)       0:05:28.947 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.066)       0:05:29.013 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g",
                    "state": "absent"
                }
            ]
        }
    ]
}
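
This is the cleanup pass of the test: the same pool definition used earlier, now with state: absent at both the pool and volume level. Reconstructed from the values printed above, the invoking play boils down to:

    - name: Clean up
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            state: absent
            volumes:
              - name: test1
                size: 4g
                mount_point: /opt/test1
                state: absent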

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.081)       0:05:29.094 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:36:14 -0500 (0:00:00.056)       0:05:29.151 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:36:15 -0500 (0:00:00.060)       0:05:29.211 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:36:15 -0500 (0:00:00.058)       0:05:29.269 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:36:15 -0500 (0:00:00.048)       0:05:29.318 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:36:15 -0500 (0:00:00.048)       0:05:29.367 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:36:15 -0500 (0:00:00.079)       0:05:29.446 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:36:15 -0500 (0:00:00.050)       0:05:29.496 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy format",
            "device": "/dev/stratis/foo/test1",
            "fs_type": "stratis xfs"
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo/test1",
            "fs_type": null
        },
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdg",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdi",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdf",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdc",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdd",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdh",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sde",
            "fs_type": "stratis"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [
        {
            "fstype": "xfs",
            "path": "/opt/test1",
            "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
            "state": "absent"
        }
    ],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}
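
The action list shows blivet's teardown order: destroy the xfs format on the stratis filesystem, destroy the filesystem device, destroy the pool device, then wipe the stratis metadata format from each of the nine member disks. Outside the role, the same end state could be reached with the stratis CLI, sketched here as tasks purely for illustration (the role drives blivet directly and does not shell out like this):

    - name: Manual stratis teardown (illustrative equivalent, not the role's mechanism)
      ansible.builtin.command: "{{ item }}"
      loop:
        - stratis filesystem destroy foo test1
        - stratis pool destroy foo
      changed_when: true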

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:36:27 -0500 (0:00:12.556)       0:05:42.053 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:36:27 -0500 (0:00:00.097)       0:05:42.150 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057715.5311906,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "6c25c73e03fb6ff497a9989abb1d9de3783ab2d1",
        "ctime": 1739057715.5301905,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057715.5301905,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1436,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:36:28 -0500 (0:00:00.508)       0:05:42.659 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}
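
With /etc/fstab present, the role stamps it with a fingerprint comment so later runs can tell the file has been managed by the role; here nothing changed because the marker was already in place (backup is empty and changed is false). A hedged sketch of the idea; the marker text and the registered stat variable name are assumptions:

    - name: Add fingerprint to /etc/fstab if present
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"  # assumed marker text
        insertafter: EOF
      when: storage_test_fstab_stat.stat.exists | d(false)  # hypothetical registered stat result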

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:36:28 -0500 (0:00:00.500)       0:05:43.159 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:36:29 -0500 (0:00:00.043)       0:05:43.202 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy format",
                "device": "/dev/stratis/foo/test1",
                "fs_type": "stratis xfs"
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo/test1",
                "fs_type": null
            },
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdg",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdi",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdf",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdc",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdd",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdh",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sde",
                "fs_type": "stratis"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [
            {
                "fstype": "xfs",
                "path": "/opt/test1",
                "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                "state": "absent"
            }
        ],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:36:29 -0500 (0:00:00.055)       0:05:43.258 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "/dev/stratis/foo/test1",
                        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                        "_raw_device": "/dev/stratis/foo/test1",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [
                            "sda",
                            "sdb",
                            "sdc",
                            "sdd",
                            "sde",
                            "sdf",
                            "sdg",
                            "sdh",
                            "sdi"
                        ],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:36:29 -0500 (0:00:00.053)       0:05:43.311 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:36:29 -0500 (0:00:00.056)       0:05:43.368 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
changed: [managed-node3] => (item={'src': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => {
    "ansible_loop_var": "mount_info",
    "backup_file": "",
    "boot": "yes",
    "changed": true,
    "dump": "0",
    "fstab": "/etc/fstab",
    "fstype": "xfs",
    "mount_info": {
        "fstype": "xfs",
        "path": "/opt/test1",
        "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "state": "absent"
    },
    "name": "/opt/test1",
    "opts": "defaults",
    "passno": "0",
    "src": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a"
}
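
Obsolete mounts returned by blivet are removed with ansible.posix.mount; state: absent both unmounts the path and deletes the matching /etc/fstab line, which is why the entry for /opt/test1 disappears. A sketch matching the mount_info loop variable seen above:

    - name: Remove obsolete mounts
      ansible.posix.mount:
        src: "{{ mount_info['src'] }}"
        path: "{{ mount_info['path'] }}"
        fstype: "{{ mount_info['fstype'] }}"
        state: absent
      loop: "{{ blivet_output.mounts | selectattr('state', 'equalto', 'absent') | list }}"
      loop_control:
        loop_var: mount_info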

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:36:29 -0500 (0:00:00.485)       0:05:43.853 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}
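
The null name and empty status indicate a bare daemon reload: no unit is targeted, systemd is simply told to re-read its fstab-derived units after the edit. A minimal equivalent task:

    - name: Tell systemd to refresh its view of /etc/fstab
      ansible.builtin.systemd_service:
        daemon_reload: true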

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:36:30 -0500 (0:00:00.931)       0:05:44.785 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:36:30 -0500 (0:00:00.093)       0:05:44.879 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:36:30 -0500 (0:00:00.100)       0:05:44.980 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:36:31 -0500 (0:00:00.983)       0:05:45.963 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
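
Note that the checksum above (da39a3ee...) is the SHA-1 of empty input and the size is 0, so /etc/crypttab exists but is empty, i.e. no LUKS mappings remain after the cleanup. Facts of this shape come from a stat task along these lines (the register name is an assumption):

    - name: Retrieve facts for the /etc/crypttab file
      ansible.builtin.stat:
        path: /etc/crypttab
      register: __storage_crypttab  # hypothetical register name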

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:36:32 -0500 (0:00:00.514)       0:05:46.478 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:36:32 -0500 (0:00:00.066)       0:05:46.544 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:187
Saturday 08 February 2025  18:36:33 -0500 (0:00:01.248)       0:05:47.792 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:36:33 -0500 (0:00:00.145)       0:05:47.938 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "/dev/stratis/foo/test1",
                    "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
                    "_raw_device": "/dev/stratis/foo/test1",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [
                        "sda",
                        "sdb",
                        "sdc",
                        "sdd",
                        "sde",
                        "sdf",
                        "sdg",
                        "sdh",
                        "sdi"
                    ],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}
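
The pool specification above, with state: absent on both the pool and its volume, is the cleanup phase of the test; invoking the role with roughly the following variables would request the same end state (a sketch built from the printed data, not the test's literal task):

    - name: Remove the Stratis pool and its volume
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            encryption: true
            state: absent
            volumes:
              - name: test1
                size: 4g
                fs_type: xfs
                mount_point: /opt/test1
                state: absent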

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:36:33 -0500 (0:00:00.107)       0:05:48.045 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:36:33 -0500 (0:00:00.092)       0:05:48.137 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
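
The info map mirrors the standard lsblk columns (NAME, FSTYPE, LABEL, MOUNTPOINT, SIZE, TYPE, UUID), and every test disk reports an empty fstype and uuid, confirming the Stratis metadata is gone. A roughly equivalent ad-hoc collection task (illustrative only; the test uses its own helper):

    - name: Collect comparable block device info
      ansible.builtin.command:
        cmd: lsblk -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: __lsblk_out  # hypothetical register name
      changed_when: false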

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:36:34 -0500 (0:00:00.493)       0:05:48.630 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003815",
    "end": "2025-02-08 18:36:34.833494",
    "rc": 0,
    "start": "2025-02-08 18:36:34.829679"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:36:34 -0500 (0:00:00.481)       0:05:49.112 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003346",
    "end": "2025-02-08 18:36:35.363664",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:36:35.360318"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:36:35 -0500 (0:00:00.523)       0:05:49.635 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a'}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:36:35 -0500 (0:00:00.117)       0:05:49.753 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:36:35 -0500 (0:00:00.056)       0:05:49.809 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:36:35 -0500 (0:00:00.049)       0:05:49.859 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:36:35 -0500 (0:00:00.045)       0:05:49.905 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:36:35 -0500 (0:00:00.095)       0:05:50.000 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:36:35 -0500 (0:00:00.048)       0:05:50.049 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:36:35 -0500 (0:00:00.035)       0:05:50.085 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:36:35 -0500 (0:00:00.045)       0:05:50.130 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.044)       0:05:50.175 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.049)       0:05:50.224 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.045)       0:05:50.270 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.046)       0:05:50.316 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.045)       0:05:50.361 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.039)       0:05:50.400 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.46.217 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/3f058d2ae1'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.46.217 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.519)       0:05:50.920 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.070)       0:05:50.990 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.090)       0:05:51.081 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:36:36 -0500 (0:00:00.046)       0:05:51.127 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.051)       0:05:51.179 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.082)       0:05:51.262 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.104)       0:05:51.366 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.077)       0:05:51.443 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.079)       0:05:51.523 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.108)       0:05:51.632 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.091)       0:05:51.723 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.079)       0:05:51.802 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.083)       0:05:51.886 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.054)       0:05:51.940 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:36:37 -0500 (0:00:00.143)       0:05:52.083 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a'})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.091)       0:05:52.175 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.246)       0:05:52.422 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a'})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.066)       0:05:52.489 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.103)       0:05:52.592 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.067)       0:05:52.659 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.035)       0:05:52.695 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.035)       0:05:52.730 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.047)       0:05:52.778 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.094)       0:05:52.872 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a'})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "/dev/stratis/foo/test1",
        "_mount_id": "UUID=49ecc62e-29ff-42fd-935d-363e3336463a",
        "_raw_device": "/dev/stratis/foo/test1",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [
            "sda",
            "sdb",
            "sdc",
            "sdd",
            "sde",
            "sdf",
            "sdg",
            "sdh",
            "sdi"
        ],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.075)       0:05:52.947 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:36:38 -0500 (0:00:00.096)       0:05:53.043 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.378912",
    "end": "2025-02-08 18:36:39.618361",
    "rc": 0,
    "start": "2025-02-08 18:36:39.239449"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [],
    "stopped_pools": []
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:36:39 -0500 (0:00:00.835)       0:05:53.879 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [],
            "stopped_pools": []
        }
    },
    "changed": false
}
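
All five report sections are empty, so no pools (running, stopped, or partially constructed) survive the removal. A hedged sketch of an explicit assertion over the fact set just registered:

    - name: Assert that no Stratis pools remain (illustrative check)
      ansible.builtin.assert:
        that:
          - _stratis_pool_info.pools | length == 0
          - _stratis_pool_info.stopped_pools | length == 0
          - _stratis_pool_info.partially_constructed_pools | length == 0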

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:36:39 -0500 (0:00:00.170)       0:05:54.050 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:36:39 -0500 (0:00:00.107)       0:05:54.158 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.132)       0:05:54.290 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.056)       0:05:54.346 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.074)       0:05:54.421 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.060)       0:05:54.481 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=49ecc62e-29ff-42fd-935d-363e3336463a'})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.084)       0:05:54.566 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.064)       0:05:54.630 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.195)       0:05:54.826 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": "/dev/stratis/foo/test1"
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.052)       0:05:54.879 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.066)       0:05:54.946 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.050)       0:05:54.996 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.054)       0:05:55.050 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.056)       0:05:55.107 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:36:40 -0500 (0:00:00.049)       0:05:55.157 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.080)       0:05:55.238 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.051)       0:05:55.289 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.051)       0:05:55.341 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.050)       0:05:55.391 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.056)       0:05:55.448 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}
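
With the volume absent, every expected-match counter is "0" and every match list is empty; the verification tasks that follow simply compare the two. A minimal sketch of that comparison (the actual test wording may differ):

    - name: Verify the fstab mount point (sketch of the comparison)
      ansible.builtin.assert:
        that:
          - storage_test_fstab_mount_point_matches | length == storage_test_fstab_expected_mount_point_matches | int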

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.081)       0:05:55.529 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.064)       0:05:55.593 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.094)       0:05:55.687 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.147)       0:05:55.834 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.111)       0:05:55.946 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.070)       0:05:56.017 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:36:41 -0500 (0:00:00.048)       0:05:56.066 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:36:42 -0500 (0:00:00.144)       0:05:56.210 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}
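
The stat above probes the volume's device node, which is absent in this scenario (exists: false). A sketch of such a probe (the device path is a hypothetical placeholder, not the test's actual expression):

    - name: See whether the device node is present
      ansible.builtin.stat:
        path: /dev/stratis/foo/test1   # hypothetical path for illustration
      register: storage_test_dev_node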

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:36:42 -0500 (0:00:00.436)       0:05:56.647 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:36:42 -0500 (0:00:00.046)       0:05:56.693 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:36:42 -0500 (0:00:00.057)       0:05:56.751 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:36:42 -0500 (0:00:00.079)       0:05:56.831 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:36:42 -0500 (0:00:00.079)       0:05:56.910 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:36:42 -0500 (0:00:00.073)       0:05:56.983 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:36:42 -0500 (0:00:00.073)       0:05:57.057 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:36:42 -0500 (0:00:00.094)       0:05:57.151 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup
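
rc 0 with "Nothing to do" means cryptsetup was already installed, so the package step was a no-op. The task behind this output likely resembles (a sketch; the real task is at test-verify-volume-encryption.yml:10):

    - name: Ensure cryptsetup is present
      ansible.builtin.package:
        name: cryptsetup
        state: present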

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:36:44 -0500 (0:00:01.484)       0:05:58.636 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:36:44 -0500 (0:00:00.069)       0:05:58.706 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:36:44 -0500 (0:00:00.050)       0:05:58.756 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:36:44 -0500 (0:00:00.062)       0:05:58.819 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:36:44 -0500 (0:00:00.051)       0:05:58.871 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:36:44 -0500 (0:00:00.046)       0:05:58.918 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:36:44 -0500 (0:00:00.045)       0:05:58.963 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:36:44 -0500 (0:00:00.051)       0:05:59.015 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:36:44 -0500 (0:00:00.046)       0:05:59.061 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:36:44 -0500 (0:00:00.068)       0:05:59.129 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.065)       0:05:59.194 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.141)       0:05:59.335 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.109)       0:05:59.445 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.080)       0:05:59.526 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.063)       0:05:59.589 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.050)       0:05:59.640 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.046)       0:05:59.686 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.045)       0:05:59.732 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.054)       0:05:59.787 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.124)       0:05:59.911 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.096)       0:06:00.007 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:36:45 -0500 (0:00:00.091)       0:06:00.099 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:36:46 -0500 (0:00:00.109)       0:06:00.208 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:36:46 -0500 (0:00:00.166)       0:06:00.375 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:36:46 -0500 (0:00:00.104)       0:06:00.480 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:36:46 -0500 (0:00:00.116)       0:06:00.596 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:36:46 -0500 (0:00:00.110)       0:06:00.706 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:36:46 -0500 (0:00:00.095)       0:06:00.802 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}
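
"VARIABLE IS NOT DEFINED!" is the debug module's normal rendering of an unset variable, expected here since the volume is absent and no expected size was computed. If the noise were unwanted, the debug could supply a default, e.g. (a sketch, not the test's actual task):

    - name: Show expected size
      ansible.builtin.debug:
        var: storage_test_expected_size | d("not computed")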

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:36:46 -0500 (0:00:00.084)       0:06:00.886 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:36:46 -0500 (0:00:00.108)       0:06:00.995 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:36:46 -0500 (0:00:00.116)       0:06:01.111 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.119)       0:06:01.231 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.099)       0:06:01.330 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.090)       0:06:01.420 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.055)       0:06:01.475 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.055)       0:06:01.531 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.121)       0:06:01.652 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.048)       0:06:01.700 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.046)       0:06:01.746 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.058)       0:06:01.805 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.081)       0:06:01.887 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.084)       0:06:01.971 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.076)       0:06:02.047 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:36:47 -0500 (0:00:00.110)       0:06:02.158 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.086)       0:06:02.245 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.079)       0:06:02.324 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.076)       0:06:02.401 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.100)       0:06:02.501 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.124)       0:06:02.626 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}
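
Note the shape of storage_test_actual_size above: registering a skipped task's result still yields a dict (skipped: true) rather than a size, so any later comparison must first confirm the task actually ran. A guarded sketch (the .bytes attribute is an assumption about how the size would be registered):

    - name: Assert expected size is actual size
      ansible.builtin.assert:
        that:
          - storage_test_actual_size.bytes == storage_test_expected_size | int
      when:
        - _storage_test_volume_present | bool
        - storage_test_actual_size is not skipped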

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.086)       0:06:02.712 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.082)       0:06:02.795 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.094)       0:06:02.889 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.084)       0:06:02.973 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.078)       0:06:03.052 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:36:48 -0500 (0:00:00.077)       0:06:03.129 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:36:49 -0500 (0:00:00.073)       0:06:03.202 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:36:49 -0500 (0:00:00.079)       0:06:03.282 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:36:49 -0500 (0:00:00.073)       0:06:03.356 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:36:49 -0500 (0:00:00.071)       0:06:03.428 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:36:49 -0500 (0:00:00.076)       0:06:03.504 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:36:49 -0500 (0:00:00.101)       0:06:03.606 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Create one Stratis pool on one disk] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:190
Saturday 08 February 2025  18:36:49 -0500 (0:00:00.092)       0:06:03.698 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:36:49 -0500 (0:00:00.260)       0:06:03.959 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:36:49 -0500 (0:00:00.184)       0:06:04.144 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:36:50 -0500 (0:00:00.123)       0:06:04.268 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:36:50 -0500 (0:00:00.159)       0:06:04.428 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:36:50 -0500 (0:00:00.074)       0:06:04.502 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:36:50 -0500 (0:00:00.088)       0:06:04.591 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:36:50 -0500 (0:00:00.113)       0:06:04.705 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:36:50 -0500 (0:00:00.093)       0:06:04.798 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:36:50 -0500 (0:00:00.157)       0:06:04.956 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:36:50 -0500 (0:00:00.079)       0:06:05.035 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": "sda",
            "name": "foo",
            "type": "stratis"
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:36:50 -0500 (0:00:00.094)       0:06:05.129 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}
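
The two debugs above show this run's inputs: one Stratis pool named foo on disk sda, and no standalone volumes (hence the undefined storage_volumes). An invocation reconstructed from those values (a sketch; the actual task sits at tests_stratis.yml:190):

    - name: Create one Stratis pool on one disk
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            disks: sda   # a single disk may be passed as a bare string, as shown above
            type: stratis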

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:36:51 -0500 (0:00:00.087)       0:06:05.216 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:36:51 -0500 (0:00:00.086)       0:06:05.303 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:36:51 -0500 (0:00:00.099)       0:06:05.403 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:36:51 -0500 (0:00:00.293)       0:06:05.697 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:36:51 -0500 (0:00:00.139)       0:06:05.836 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:36:51 -0500 (0:00:00.139)       0:06:05.976 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:36:51 -0500 (0:00:00.072)       0:06:06.048 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0",
        "/dev/stratis/foo"
    ],
    "mounts": [],
    "packages": [
        "stratisd",
        "e2fsprogs",
        "stratis-cli"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ],
    "volumes": []
}
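
The two blivet actions above (create format on /dev/sda, create device /dev/stratis/foo) amount to creating an empty Stratis pool on a single disk. Done by hand with stratis-cli instead of through the role, that would be roughly (an illustration; the role drives blivet directly, not the CLI):

    - name: Equivalent manual pool creation (illustration only)
      ansible.builtin.command: stratis pool create foo /dev/sda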

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:36:54 -0500 (0:00:03.059)       0:06:09.108 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:36:54 -0500 (0:00:00.060)       0:06:09.169 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057789.5975058,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "58c9cf35b6a5bb13136caa97ec2cf1f888ff31f6",
        "ctime": 1739057789.5965059,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057789.5965059,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:36:55 -0500 (0:00:00.468)       0:06:09.638 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.549)       0:06:10.187 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.072)       0:06:10.259 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0",
            "/dev/stratis/foo"
        ],
        "mounts": [],
        "packages": [
            "stratisd",
            "e2fsprogs",
            "stratis-cli"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.106)       0:06:10.365 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.136)       0:06:10.502 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.087)       0:06:10.590 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.119)       0:06:10.710 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.076)       0:06:10.786 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.088)       0:06:10.874 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.088)       0:06:10.963 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:36:56 -0500 (0:00:00.112)       0:06:11.075 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:36:57 -0500 (0:00:00.494)       0:06:11.570 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:36:57 -0500 (0:00:00.061)       0:06:11.632 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:199
Saturday 08 February 2025  18:36:58 -0500 (0:00:01.094)       0:06:12.726 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:36:58 -0500 (0:00:00.266)       0:06:12.992 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:36:58 -0500 (0:00:00.156)       0:06:13.149 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:36:59 -0500 (0:00:00.214)       0:06:13.363 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-thindata",
            "size": "9.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-thinmeta",
            "size": "6M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-physical-originsub",
            "size": "10G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-thinpool-pool",
            "size": "9.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "f6db8a2f-c0d0-4680-b53e-55706f4ab9f1"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
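
A comparable block-device snapshot can be taken outside the test harness; the task below is illustrative (it is not the module the test itself uses, and the register name is hypothetical), using lsblk columns that match the fields shown above:

    - name: Collect comparable block device info (illustrative)
      ansible.builtin.command:
        cmd: lsblk -p -J -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: storage_test_lsblk  # hypothetical register name
      changed_when: false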

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:36:59 -0500 (0:00:00.568)       0:06:13.932 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003446",
    "end": "2025-02-08 18:37:00.228730",
    "rc": 0,
    "start": "2025-02-08 18:37:00.225284"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:37:00 -0500 (0:00:00.576)       0:06:14.509 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003339",
    "end": "2025-02-08 18:37:00.732358",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:37:00.729019"
}
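
The `failed_when_result: false` field above indicates the task overrides failure handling so that a missing /etc/crypttab cannot abort the run. A sketch of such a task, reusing the `storage_test_crypttab` name that the cleanup task later in this log sets to null:

    - name: Read the /etc/crypttab file (sketch)
      ansible.builtin.command:
        cmd: cat /etc/crypttab
      register: storage_test_crypttab
      failed_when: false
      changed_when: false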

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:37:00 -0500 (0:00:00.499)       0:06:15.009 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': []})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.174)       0:06:15.183 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.079)       0:06:15.263 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.125)       0:06:15.388 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.105)       0:06:15.493 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)
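
The two inclusions above fan out over the `_storage_pool_tests` fact set earlier ("members", "volumes"). A plausible shape for that dispatch task, with the loop variable name assumed:

    - name: Verify pool subset (sketch)
      ansible.builtin.include_tasks: "test-verify-pool-{{ storage_test_pool_subset }}.yml"
      loop: "{{ _storage_pool_tests }}"
      loop_control:
        loop_var: storage_test_pool_subset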

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.187)       0:06:15.681 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.080)       0:06:15.762 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.066)       0:06:15.828 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.069)       0:06:15.898 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.082)       0:06:15.981 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.092)       0:06:16.073 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:37:01 -0500 (0:00:00.080)       0:06:16.153 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:37:02 -0500 (0:00:00.074)       0:06:16.228 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:37:02 -0500 (0:00:00.072)       0:06:16.301 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:37:02 -0500 (0:00:00.066)       0:06:16.368 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.46.217 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/3f058d2ae1'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.46.217 closed.
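
The grow-to-fill support probe above only needs to confirm that the installed blivet can honor the feature, and it returns rc 0 here. A minimal stand-in check, under the assumption of probing with a Python one-liner rather than whatever the test actually executes (register name hypothetical):

    - name: Check that blivet is importable (illustrative stand-in)
      ansible.builtin.command:
        cmd: python3 -c 'import blivet; print(blivet.__version__)'
      register: storage_test_blivet_version  # hypothetical name
      changed_when: false
      failed_when: storage_test_blivet_version.rc != 0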


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:37:02 -0500 (0:00:00.615)       0:06:16.983 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:37:02 -0500 (0:00:00.053)       0:06:17.037 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:37:02 -0500 (0:00:00.089)       0:06:17.126 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.047)       0:06:17.174 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.046)       0:06:17.221 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.046)       0:06:17.267 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.051)       0:06:17.318 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.047)       0:06:17.366 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.048)       0:06:17.414 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.055)       0:06:17.470 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.064)       0:06:17.534 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.048)       0:06:17.582 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.046)       0:06:17.629 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.048)       0:06:17.678 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.089)       0:06:17.767 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.038)       0:06:17.805 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.089)       0:06:17.894 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.041)       0:06:17.936 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.114)       0:06:18.050 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:37:03 -0500 (0:00:00.081)       0:06:18.131 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:37:04 -0500 (0:00:00.042)       0:06:18.174 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:37:04 -0500 (0:00:00.037)       0:06:18.211 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:37:04 -0500 (0:00:00.048)       0:06:18.260 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:37:04 -0500 (0:00:00.098)       0:06:18.359 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:37:04 -0500 (0:00:00.054)       0:06:18.413 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:37:04 -0500 (0:00:00.177)       0:06:18.591 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.381124",
    "end": "2025-02-08 18:37:05.199626",
    "rc": 0,
    "start": "2025-02-08 18:37:04.818502"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "f6db8a2f-c0d0-4680-b53e-55706f4ab9f1"
                    }
                ]
            },
            "filesystems": [],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "4e9c3e41-f290-464a-bf4b-333701ab5bb5"
        }
    ],
    "stopped_pools": []
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.892)       0:06:19.483 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "f6db8a2f-c0d0-4680-b53e-55706f4ab9f1"
                            }
                        ]
                    },
                    "filesystems": [],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "4e9c3e41-f290-464a-bf4b-333701ab5bb5"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}
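
The `_stratis_pool_info` fact above is simply the 'stratis report' output parsed from JSON; given the `storage_test_stratis_report` register name that the "Reset variable used by test" task later nulls out, the parsing step is presumably along these lines:

    - name: Get information about Stratis (sketch)
      ansible.builtin.set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"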

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.086)       0:06:19.569 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
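
A sketch of the kind of assertion that produces the "All assertions passed" message, checking the pool name from the spec against the parsed report (the exact expression is assumed):

    - name: Verify that the pool was created (sketch)
      ansible.builtin.assert:
        that:
          - storage_test_pool.name in (_stratis_pool_info.pools | map(attribute='name') | list)
        msg: "Pool {{ storage_test_pool.name }} not found in 'stratis report' output"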

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.101)       0:06:19.671 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.060)       0:06:19.731 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.048)       0:06:19.780 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.049)       0:06:19.829 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.049)       0:06:19.878 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.041)       0:06:19.920 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.036)       0:06:19.956 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Add the second disk to the pool] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:202
Saturday 08 February 2025  18:37:05 -0500 (0:00:00.049)       0:06:20.005 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.194)       0:06:20.200 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.080)       0:06:20.280 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.099)       0:06:20.380 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.154)       0:06:20.535 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.057)       0:06:20.592 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.055)       0:06:20.647 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.069)       0:06:20.717 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.055)       0:06:20.773 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.101)       0:06:20.875 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.092)       0:06:20.967 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb"
            ],
            "name": "foo",
            "type": "stratis"
        }
    ]
}
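
The spec above comes from the "Add the second disk to the pool" step at tests_stratis.yml:202; the call is effectively the same role invocation as before with sdb appended to the disk list (sketch):

    - name: Add the second disk to the pool (sketch)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks:
              - sda
              - sdb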

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:37:06 -0500 (0:00:00.117)       0:06:21.084 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:37:07 -0500 (0:00:00.097)       0:06:21.182 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:37:07 -0500 (0:00:00.123)       0:06:21.305 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:37:07 -0500 (0:00:00.079)       0:06:21.384 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:37:07 -0500 (0:00:00.123)       0:06:21.507 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:37:07 -0500 (0:00:00.133)       0:06:21.641 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:37:07 -0500 (0:00:00.215)       0:06:21.857 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:37:07 -0500 (0:00:00.063)       0:06:21.921 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        },
        {
            "action": "add container member",
            "device": "/dev/sdb",
            "fs_type": null
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/stratis/foo",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [],
    "packages": [
        "stratis-cli",
        "e2fsprogs",
        "stratisd"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ],
    "volumes": []
}
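
The two blivet actions above, "create format" followed by "add container member" on /dev/sdb, amount to extending the pool with a new data device; by hand this is roughly the following (illustrative only, the role drives blivet rather than the CLI):

    - name: Extend pool foo with /dev/sdb by hand (illustrative)
      ansible.builtin.command:
        cmd: stratis pool add-data foo /dev/sdb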

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:37:10 -0500 (0:00:03.239)       0:06:25.161 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:37:11 -0500 (0:00:00.095)       0:06:25.256 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057789.5975058,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "58c9cf35b6a5bb13136caa97ec2cf1f888ff31f6",
        "ctime": 1739057789.5965059,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057789.5965059,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:37:11 -0500 (0:00:00.568)       0:06:25.826 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}
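
This task reports ok with an empty backup because the "# system_role:storage" fingerprint (visible at the top of the fstab dump earlier in this log) is already in place. A sketch of such an idempotent fingerprint task:

    - name: Add fingerprint to /etc/fstab if present (sketch)
      ansible.builtin.lineinfile:
        path: /etc/fstab
        line: "# system_role:storage"
        insertbefore: BOF
        backup: true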

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.543)       0:06:26.369 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.041)       0:06:26.411 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            },
            {
                "action": "add container member",
                "device": "/dev/sdb",
                "fs_type": null
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/stratis/foo",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [],
        "packages": [
            "stratis-cli",
            "e2fsprogs",
            "stratisd"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.053)       0:06:26.464 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.051)       0:06:26.515 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.055)       0:06:26.571 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.090)       0:06:26.661 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.052)       0:06:26.713 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.063)       0:06:26.777 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.062)       0:06:26.839 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:37:12 -0500 (0:00:00.051)       0:06:26.891 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:37:13 -0500 (0:00:00.465)       0:06:27.357 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:37:13 -0500 (0:00:00.037)       0:06:27.394 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:211
Saturday 08 February 2025  18:37:14 -0500 (0:00:01.004)       0:06:28.398 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:37:14 -0500 (0:00:00.099)       0:06:28.497 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:37:14 -0500 (0:00:00.068)       0:06:28.566 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:37:14 -0500 (0:00:00.060)       0:06:28.626 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-thindata",
            "size": "19.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-flex-thinmeta",
            "size": "9M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-physical-originsub",
            "size": "20G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-4e9c3e41f290464abf4b333701ab5bb5-thinpool-pool",
            "size": "19.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/sda": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "f6db8a2f-c0d0-4680-b53e-55706f4ab9f1"
        },
        "/dev/sdb": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": "888f44fd-4648-4b69-9f7f-51544b1a7a9a"
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:37:14 -0500 (0:00:00.539)       0:06:29.165 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.004142",
    "end": "2025-02-08 18:37:15.395185",
    "rc": 0,
    "start": "2025-02-08 18:37:15.391043"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:37:15 -0500 (0:00:00.507)       0:06:29.672 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003371",
    "end": "2025-02-08 18:37:15.896282",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:37:15.892911"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.506)       0:06:30.179 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda', 'sdb'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': []})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.348)       0:06:30.527 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.081)       0:06:30.609 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.054)       0:06:30.663 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.058)       0:06:30.722 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.104)       0:06:30.826 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.050)       0:06:30.876 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.037)       0:06:30.913 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.046)       0:06:30.960 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.059)       0:06:31.019 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:37:16 -0500 (0:00:00.111)       0:06:31.131 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:37:17 -0500 (0:00:00.091)       0:06:31.222 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:37:17 -0500 (0:00:00.108)       0:06:31.330 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:37:17 -0500 (0:00:00.086)       0:06:31.417 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:37:17 -0500 (0:00:00.068)       0:06:31.485 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.46.217 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/3f058d2ae1'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.46.217 closed.


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:37:17 -0500 (0:00:00.585)       0:06:32.071 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.105)       0:06:32.177 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.168)       0:06:32.346 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.087)       0:06:32.434 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.118)       0:06:32.553 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.099)       0:06:32.653 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.098)       0:06:32.751 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.079)       0:06:32.830 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.084)       0:06:32.915 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.074)       0:06:32.989 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.084)       0:06:33.074 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:37:18 -0500 (0:00:00.077)       0:06:33.151 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:37:19 -0500 (0:00:00.076)       0:06:33.228 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:37:19 -0500 (0:00:00.075)       0:06:33.303 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:37:19 -0500 (0:00:00.160)       0:06:33.464 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:37:19 -0500 (0:00:00.091)       0:06:33.556 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:37:19 -0500 (0:00:00.274)       0:06:33.830 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:37:19 -0500 (0:00:00.068)       0:06:33.899 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:37:19 -0500 (0:00:00.186)       0:06:34.086 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:37:20 -0500 (0:00:00.090)       0:06:34.177 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:37:20 -0500 (0:00:00.044)       0:06:34.221 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:37:20 -0500 (0:00:00.040)       0:06:34.261 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:37:20 -0500 (0:00:00.048)       0:06:34.310 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:37:20 -0500 (0:00:00.252)       0:06:34.563 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:37:20 -0500 (0:00:00.078)       0:06:34.641 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:37:20 -0500 (0:00:00.184)       0:06:34.826 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.381461",
    "end": "2025-02-08 18:37:21.441915",
    "rc": 0,
    "start": "2025-02-08 18:37:21.060454"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sda",
                        "size": "20971520 sectors",
                        "uuid": "f6db8a2f-c0d0-4680-b53e-55706f4ab9f1"
                    },
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                        "in_use": true,
                        "path": "/dev/sdb",
                        "size": "20971520 sectors",
                        "uuid": "888f44fd-4648-4b69-9f7f-51544b1a7a9a"
                    }
                ]
            },
            "filesystems": [],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "4e9c3e41-f290-464a-bf4b-333701ab5bb5"
        }
    ],
    "stopped_pools": []
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:37:21 -0500 (0:00:00.914)       0:06:35.740 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sda",
                                "size": "20971520 sectors",
                                "uuid": "f6db8a2f-c0d0-4680-b53e-55706f4ab9f1"
                            },
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: None",
                                "in_use": true,
                                "path": "/dev/sdb",
                                "size": "20971520 sectors",
                                "uuid": "888f44fd-4648-4b69-9f7f-51544b1a7a9a"
                            }
                        ]
                    },
                    "filesystems": [],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "4e9c3e41-f290-464a-bf4b-333701ab5bb5"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:37:21 -0500 (0:00:00.103)       0:06:35.843 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:37:21 -0500 (0:00:00.105)       0:06:35.948 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:37:21 -0500 (0:00:00.084)       0:06:36.033 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:37:21 -0500 (0:00:00.076)       0:06:36.110 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:37:22 -0500 (0:00:00.078)       0:06:36.189 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:37:22 -0500 (0:00:00.079)       0:06:36.268 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:37:22 -0500 (0:00:00.067)       0:06:36.335 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:37:22 -0500 (0:00:00.065)       0:06:36.401 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:214
Saturday 08 February 2025  18:37:22 -0500 (0:00:00.123)       0:06:36.524 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:37:22 -0500 (0:00:00.334)       0:06:36.858 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:37:22 -0500 (0:00:00.139)       0:06:36.998 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:37:22 -0500 (0:00:00.128)       0:06:37.126 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:37:23 -0500 (0:00:00.181)       0:06:37.308 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:37:23 -0500 (0:00:00.077)       0:06:37.386 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:37:23 -0500 (0:00:00.119)       0:06:37.505 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:37:23 -0500 (0:00:00.102)       0:06:37.607 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:37:23 -0500 (0:00:00.111)       0:06:37.719 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:37:23 -0500 (0:00:00.246)       0:06:37.965 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:37:23 -0500 (0:00:00.096)       0:06:38.062 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "name": "foo",
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "mount_point": "/opt/test1",
                    "name": "test1",
                    "size": "4g",
                    "state": "absent"
                }
            ]
        }
    ]
}

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:37:23 -0500 (0:00:00.092)       0:06:38.155 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:37:24 -0500 (0:00:00.082)       0:06:38.238 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:37:24 -0500 (0:00:00.076)       0:06:38.314 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:37:24 -0500 (0:00:00.084)       0:06:38.399 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:37:24 -0500 (0:00:00.079)       0:06:38.478 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:37:24 -0500 (0:00:00.079)       0:06:38.557 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:37:24 -0500 (0:00:00.129)       0:06:38.687 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:37:24 -0500 (0:00:00.069)       0:06:38.756 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "destroy format",
            "device": "/dev/sdb",
            "fs_type": "stratis"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "",
                    "_mount_id": "",
                    "_raw_device": "",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:37:28 -0500 (0:00:03.769)       0:06:42.526 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:37:28 -0500 (0:00:00.150)       0:06:42.677 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057789.5975058,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "58c9cf35b6a5bb13136caa97ec2cf1f888ff31f6",
        "ctime": 1739057789.5965059,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057789.5965059,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:37:29 -0500 (0:00:00.600)       0:06:43.278 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:37:29 -0500 (0:00:00.490)       0:06:43.768 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:37:29 -0500 (0:00:00.037)       0:06:43.806 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "destroy format",
                "device": "/dev/sdb",
                "fs_type": "stratis"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "",
                        "_mount_id": "",
                        "_raw_device": "",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:37:29 -0500 (0:00:00.053)       0:06:43.860 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda",
                    "sdb",
                    "sdc",
                    "sdd",
                    "sde",
                    "sdf",
                    "sdg",
                    "sdh",
                    "sdi"
                ],
                "encryption": false,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": [
                    {
                        "_device": "",
                        "_mount_id": "",
                        "_raw_device": "",
                        "cache_devices": [],
                        "cache_mode": null,
                        "cache_size": 0,
                        "cached": false,
                        "compression": null,
                        "deduplication": null,
                        "disks": [],
                        "encryption": false,
                        "encryption_cipher": null,
                        "encryption_key": null,
                        "encryption_key_size": null,
                        "encryption_luks_version": null,
                        "encryption_password": null,
                        "fs_create_options": "",
                        "fs_label": "",
                        "fs_overwrite_existing": true,
                        "fs_type": "xfs",
                        "mount_check": 0,
                        "mount_device_identifier": "uuid",
                        "mount_group": null,
                        "mount_mode": null,
                        "mount_options": "defaults",
                        "mount_passno": 0,
                        "mount_point": "/opt/test1",
                        "mount_user": null,
                        "name": "test1",
                        "raid_chunk_size": null,
                        "raid_device_count": null,
                        "raid_disks": [],
                        "raid_level": null,
                        "raid_metadata_version": null,
                        "raid_spare_count": null,
                        "raid_stripe_size": null,
                        "size": "4g",
                        "state": "absent",
                        "thin": false,
                        "thin_pool_name": null,
                        "thin_pool_size": null,
                        "type": "stratis",
                        "vdo_pool_size": null
                    }
                ]
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:37:29 -0500 (0:00:00.052)       0:06:43.912 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}
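
The two tasks above record the role's computed pool and volume lists as facts so the verification playbook can inspect them later. A minimal sketch of the pattern — the exact source variable is internal to the role; `blivet_output` is the registered module result referenced by the skip conditions below, and the attribute names are assumptions:

- name: Set the list of pools for test verification
  ansible.builtin.set_fact:
    _storage_pools_list: "{{ blivet_output.pools | d([]) }}"  # source attribute assumed

- name: Set the list of volumes for test verification
  ansible.builtin.set_fact:
    _storage_volumes_list: "{{ blivet_output.volumes | d([]) }}"  # source attribute assumed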

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:37:29 -0500 (0:00:00.054)       0:06:43.967 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:37:30 -0500 (0:00:00.268)       0:06:44.236 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:37:30 -0500 (0:00:00.050)       0:06:44.286 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:37:30 -0500 (0:00:00.056)       0:06:44.343 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:37:30 -0500 (0:00:00.056)       0:06:44.399 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}
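
Both "Tell systemd" tasks skip here because blivet reported no mount changes. When mounts do change, the role needs a daemon reload so systemd regenerates the mount units derived from /etc/fstab. A sketch of the guarded reload, assuming the same `blivet_output` register:

- name: Tell systemd to refresh its view of /etc/fstab
  ansible.builtin.systemd_service:
    daemon_reload: true
  when: blivet_output['mounts']  # skipped above: no mounts were changed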

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:37:30 -0500 (0:00:00.051)       0:06:44.451 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}
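
The crypttab check is a plain stat; the checksum da39a3ee5e6b4b0d3255bfef95601890afd80709 is the SHA-1 of an empty file, consistent with size 0. An equivalent task (register name hypothetical):

- name: Retrieve facts for the /etc/crypttab file
  ansible.builtin.stat:
    path: /etc/crypttab
  register: __storage_crypttab_stat  # hypothetical register name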

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:37:30 -0500 (0:00:00.452)       0:06:44.904 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:37:30 -0500 (0:00:00.038)       0:06:44.942 ***** 
ok: [managed-node3]
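
After applying storage changes the role re-gathers facts so subsequent tasks see the final device and mount state. The equivalent task is simply:

- name: Update facts
  ansible.builtin.setup: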

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:229
Saturday 08 February 2025  18:37:31 -0500 (0:00:00.990)       0:06:45.933 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:37:31 -0500 (0:00:00.097)       0:06:46.030 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda",
                "sdb",
                "sdc",
                "sdd",
                "sde",
                "sdf",
                "sdg",
                "sdh",
                "sdi"
            ],
            "encryption": false,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": [
                {
                    "_device": "",
                    "_mount_id": "",
                    "_raw_device": "",
                    "cache_devices": [],
                    "cache_mode": null,
                    "cache_size": 0,
                    "cached": false,
                    "compression": null,
                    "deduplication": null,
                    "disks": [],
                    "encryption": false,
                    "encryption_cipher": null,
                    "encryption_key": null,
                    "encryption_key_size": null,
                    "encryption_luks_version": null,
                    "encryption_password": null,
                    "fs_create_options": "",
                    "fs_label": "",
                    "fs_overwrite_existing": true,
                    "fs_type": "xfs",
                    "mount_check": 0,
                    "mount_device_identifier": "uuid",
                    "mount_group": null,
                    "mount_mode": null,
                    "mount_options": "defaults",
                    "mount_passno": 0,
                    "mount_point": "/opt/test1",
                    "mount_user": null,
                    "name": "test1",
                    "raid_chunk_size": null,
                    "raid_device_count": null,
                    "raid_disks": [],
                    "raid_level": null,
                    "raid_metadata_version": null,
                    "raid_spare_count": null,
                    "raid_stripe_size": null,
                    "size": "4g",
                    "state": "absent",
                    "thin": false,
                    "thin_pool_name": null,
                    "thin_pool_size": null,
                    "type": "stratis",
                    "vdo_pool_size": null
                }
            ]
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:37:31 -0500 (0:00:00.071)       0:06:46.102 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}
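
The volume printout is skipped because the standalone-volume list is empty; the guard shown in `false_condition` is an ordinary conditional debug, roughly:

- name: Print out volume information
  ansible.builtin.debug:
    var: _storage_volumes_list
  when: _storage_volumes_list | length > 0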

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:37:31 -0500 (0:00:00.059)       0:06:46.161 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
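
The `info` dict above mirrors lsblk's per-device fields (name, fstype, label, uuid, size, type, mountpoint). The test likely gathers this through a helper script or module, so the following one-task approximation is an assumption:

- name: Collect info about the volumes
  ansible.builtin.command:
    cmd: lsblk -p -P -o NAME,FSTYPE,LABEL,UUID,SIZE,TYPE,MOUNTPOINT
  register: storage_test_blkinfo  # hypothetical register name
  changed_when: false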

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:37:32 -0500 (0:00:00.458)       0:06:46.620 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003375",
    "end": "2025-02-08 18:37:32.809293",
    "rc": 0,
    "start": "2025-02-08 18:37:32.805918"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
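
The fstab read is a plain `cat` whose output is registered for the match-counting tasks further down; note the `# system_role:storage` fingerprint line the role leaves at the top of the file. A sketch (register name hypothetical):

- name: Read the /etc/fstab file for volume existence
  ansible.builtin.command:
    cmd: cat /etc/fstab
  register: storage_test_fstab  # hypothetical register name
  changed_when: false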

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:37:32 -0500 (0:00:00.442)       0:06:47.062 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003341",
    "end": "2025-02-08 18:37:33.253423",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:37:33.250082"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.449)       0:06:47.511 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''}]})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.125)       0:06:47.637 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.070)       0:06:47.707 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.059)       0:06:47.767 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.054)       0:06:47.821 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)
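
The pool verification fans out over the `_storage_pool_tests` subsets set just above ("members", "volumes"). A sketch of the dispatch loop, with the file-name pattern inferred from the included paths:

- name: Verify pool subset
  ansible.builtin.include_tasks: "test-verify-pool-{{ storage_test_pool_subset }}.yml"
  loop: "{{ _storage_pool_tests }}"
  loop_control:
    loop_var: storage_test_pool_subset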

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.098)       0:06:47.920 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.049)       0:06:47.969 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.036)       0:06:48.006 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.051)       0:06:48.057 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:37:33 -0500 (0:00:00.064)       0:06:48.122 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.063)       0:06:48.185 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.054)       0:06:48.240 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.057)       0:06:48.297 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.047)       0:06:48.344 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.061)       0:06:48.406 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.46.217 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/3f058d2ae1'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.46.217 closed.
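
This probe only records whether the installed blivet knows about PV grow-to-fill (the rc is registered and failure is tolerated via failed_when); the verbose SSH stderr above is just connection debugging. The real command line is not shown in the log, so the following shape is hypothetical throughout:

- name: Check that blivet supports PV grow to fill
  ansible.builtin.command:
    cmd: python3 -c "import blivet.formats.lvmpv as m; raise SystemExit(0 if hasattr(m.LVMPhysicalVolume, 'grow_to_fill') else 1)"
  register: storage_test_grow_support  # hypothetical names throughout
  failed_when: false
  changed_when: false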


TASK [Verify that PVs fill the whole devices when they should] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.489)       0:06:48.895 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.052)       0:06:48.947 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.090)       0:06:49.038 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.069)       0:06:49.108 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:37:34 -0500 (0:00:00.051)       0:06:49.159 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.054)       0:06:49.214 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.059)       0:06:49.274 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.046)       0:06:49.320 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.045)       0:06:49.366 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.058)       0:06:49.424 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.140)       0:06:49.564 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.046)       0:06:49.611 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.046)       0:06:49.658 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.049)       0:06:49.708 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.089)       0:06:49.798 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''})  => {
    "ansible_loop_var": "storage_test_lvmraid_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_lvmraid_volume": {
        "_device": "",
        "_mount_id": "",
        "_raw_device": "",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped
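
Each member-validation file loops over the pool's volumes with a dedicated loop_var, and the `when:` is evaluated per item — which is why every volume shows up individually as skipped for this Stratis pool. A sketch of the LVM RAID dispatch (included file name assumed):

- name: Validate pool member LVM RAID settings
  ansible.builtin.include_tasks: verify-pool-member-lvmraid.yml  # per-volume task file, name assumed
  loop: "{{ storage_test_pool.volumes }}"
  loop_control:
    loop_var: storage_test_lvmraid_volume
  when: storage_test_pool.type == 'lvm'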

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.055)       0:06:49.853 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.117)       0:06:49.971 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''})  => {
    "ansible_loop_var": "storage_test_thin_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_thin_volume": {
        "_device": "",
        "_mount_id": "",
        "_raw_device": "",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.070)       0:06:50.041 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:37:35 -0500 (0:00:00.103)       0:06:50.144 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}
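
For an unencrypted pool the expected crypttab entry count is "0" and the key-file placeholder is "-". The exact expression is internal to the tests; one plausible sketch of how those expectations could be derived:

- name: Set test variables
  ansible.builtin.set_fact:
    _storage_test_expected_crypttab_entries: "{{ (storage_test_pool.encryption | bool) | ternary(storage_test_pool.disks | length, 0) }}"  # derivation assumed
    _storage_test_expected_crypttab_key_file: "{{ storage_test_pool.encryption_key | d('-', true) }}"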

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:37:36 -0500 (0:00:00.071)       0:06:50.215 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:37:36 -0500 (0:00:00.037)       0:06:50.253 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:37:36 -0500 (0:00:00.036)       0:06:50.289 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:37:36 -0500 (0:00:00.049)       0:06:50.339 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:37:36 -0500 (0:00:00.128)       0:06:50.467 ***** 
skipping: [managed-node3] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''})  => {
    "ansible_loop_var": "storage_test_vdo_volume",
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False",
    "storage_test_vdo_volume": {
        "_device": "",
        "_mount_id": "",
        "_raw_device": "",
        "cache_devices": [],
        "cache_mode": null,
        "cache_size": 0,
        "cached": false,
        "compression": null,
        "deduplication": null,
        "disks": [],
        "encryption": false,
        "encryption_cipher": null,
        "encryption_key": null,
        "encryption_key_size": null,
        "encryption_luks_version": null,
        "encryption_password": null,
        "fs_create_options": "",
        "fs_label": "",
        "fs_overwrite_existing": true,
        "fs_type": "xfs",
        "mount_check": 0,
        "mount_device_identifier": "uuid",
        "mount_group": null,
        "mount_mode": null,
        "mount_options": "defaults",
        "mount_passno": 0,
        "mount_point": "/opt/test1",
        "mount_user": null,
        "name": "test1",
        "raid_chunk_size": null,
        "raid_device_count": null,
        "raid_disks": [],
        "raid_level": null,
        "raid_metadata_version": null,
        "raid_spare_count": null,
        "raid_stripe_size": null,
        "size": "4g",
        "state": "absent",
        "thin": false,
        "thin_pool_name": null,
        "thin_pool_size": null,
        "type": "stratis",
        "vdo_pool_size": null
    }
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:37:36 -0500 (0:00:00.103)       0:06:50.571 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:37:36 -0500 (0:00:00.117)       0:06:50.688 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.383336",
    "end": "2025-02-08 18:37:37.268349",
    "rc": 0,
    "start": "2025-02-08 18:37:36.885013"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [],
    "stopped_pools": []
}
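
`stratis report` prints the daemon's state as JSON; every map and list is empty here, confirming the pool was removed. The invocation itself is a plain command task (the register name matches the variable reset later in the log):

- name: Run 'stratis report'
  ansible.builtin.command:
    cmd: stratis report
  register: storage_test_stratis_report
  changed_when: false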

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:37:37 -0500 (0:00:00.839)       0:06:51.528 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [],
            "stopped_pools": []
        }
    },
    "changed": false
}
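
The registered stdout is parsed straight into a fact with the from_json filter:

- name: Get information about Stratis
  ansible.builtin.set_fact:
    _stratis_pool_info: "{{ storage_test_stratis_report.stdout | from_json }}"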

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:37:37 -0500 (0:00:00.086)       0:06:51.615 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}
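
With state 'absent' the creation check is skipped. When a pool is expected to exist, the assertion would look its name up in the report; a sketch, with the key layout taken from the report above:

- name: Verify that the pool was created
  ansible.builtin.assert:
    that:
      - storage_test_pool.name in _stratis_pool_info.name_to_pool_uuid_map
  when: storage_test_pool.state == 'present'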

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:37:37 -0500 (0:00:00.046)       0:06:51.661 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:37:37 -0500 (0:00:00.051)       0:06:51.713 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:37:37 -0500 (0:00:00.050)       0:06:51.763 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:37:37 -0500 (0:00:00.067)       0:06:51.830 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:37:37 -0500 (0:00:00.065)       0:06:51.896 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node3 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''})

TASK [Set storage volume test variables] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2
Saturday 08 February 2025  18:37:37 -0500 (0:00:00.104)       0:06:52.000 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": false,
        "_storage_volume_tests": [
            "mount",
            "fstab",
            "fs",
            "device",
            "encryption",
            "md",
            "size",
            "cache"
        ]
    },
    "changed": false
}

TASK [Run test verify for {{ storage_test_volume_subset }}] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19
Saturday 08 February 2025  18:37:37 -0500 (0:00:00.069)       0:06:52.070 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node3 => (item=mount)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node3 => (item=fstab)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node3 => (item=fs)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node3 => (item=device)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node3 => (item=encryption)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node3 => (item=md)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node3 => (item=size)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node3 => (item=cache)

TASK [Get expected mount device based on device type] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.196)       0:06:52.267 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_device_path": ""
    },
    "changed": false
}

TASK [Set some facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.053)       0:06:52.320 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_mount_expected_mount_point": "/opt/test1",
        "storage_test_swap_expected_matches": "0"
    },
    "changed": false
}

TASK [Get information about the mountpoint directory] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.066)       0:06:52.387 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and (storage_test_volume.mount_user or storage_test_volume.mount_group or storage_test_volume.mount_mode)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the current mount state by device] ********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:28
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.051)       0:06:52.439 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory user] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:36
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.047)       0:06:52.486 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_user",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory group] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:42
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.045)       0:06:52.532 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_group",
    "skip_reason": "Conditional result was False"
}

TASK [Verify mount directory permissions] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:48
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.045)       0:06:52.578 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.mount_point and storage_test_volume.mount_mode",
    "skip_reason": "Conditional result was False"
}

TASK [Get path of test volume device] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:57
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.070)       0:06:52.649 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Gather swap info] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:63
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.046)       0:06:52.695 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify swap status] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:69
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.053)       0:06:52.748 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.fs_type == \"swap\"",
    "skip_reason": "Conditional result was False"
}

TASK [Unset facts] *************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:79
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.073)       0:06:52.822 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_found_mount_stat": null,
        "storage_test_mount_expected_mount_point": null,
        "storage_test_swap_expected_matches": null,
        "storage_test_swaps": null,
        "storage_test_sys_node": null
    },
    "changed": false
}

TASK [Set some variables for fstab checking] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.216)       0:06:53.038 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": "0",
        "storage_test_fstab_expected_mount_options_matches": "0",
        "storage_test_fstab_expected_mount_point_matches": "0",
        "storage_test_fstab_id_matches": [],
        "storage_test_fstab_mount_options_matches": [],
        "storage_test_fstab_mount_point_matches": []
    },
    "changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17
Saturday 08 February 2025  18:37:38 -0500 (0:00:00.093)       0:06:53.132 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the fstab mount point] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.068)       0:06:53.201 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify mount_options] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.066)       0:06:53.267 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_verify_mount_options | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fingerprint] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.064)       0:06:53.332 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Clean up variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.070)       0:06:53.403 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_fstab_expected_id_matches": null,
        "storage_test_fstab_expected_mount_options_matches": null,
        "storage_test_fstab_expected_mount_point_matches": null,
        "storage_test_fstab_id_matches": null,
        "storage_test_fstab_mount_options_matches": null,
        "storage_test_fstab_mount_point_matches": null
    },
    "changed": false
}

TASK [Verify fs type] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.070)       0:06:53.473 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify fs label] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.052)       0:06:53.525 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type != \"stratis\"",
    "skip_reason": "Conditional result was False"
}

TASK [See whether the device node is present] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.061)       0:06:53.587 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.463)       0:06:54.051 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the device node] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.047)       0:06:54.099 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Make sure we got info about this volume] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23
Saturday 08 February 2025  18:37:39 -0500 (0:00:00.053)       0:06:54.152 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Process volume type (set initial value) (1/2)] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29
Saturday 08 February 2025  18:37:40 -0500 (0:00:00.051)       0:06:54.204 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "st_volume_type": "stratis"
    },
    "changed": false
}

TASK [Process volume type (get RAID value) (2/2)] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33
Saturday 08 February 2025  18:37:40 -0500 (0:00:00.079)       0:06:54.283 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == \"raid\"",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38
Saturday 08 February 2025  18:37:40 -0500 (0:00:00.108)       0:06:54.391 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Stat the LUKS device, if encrypted] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3
Saturday 08 February 2025  18:37:40 -0500 (0:00:00.099)       0:06:54.491 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Ensure cryptsetup is present] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10
Saturday 08 February 2025  18:37:40 -0500 (0:00:00.109)       0:06:54.600 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: cryptsetup

TASK [Collect LUKS info for this volume] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16
Saturday 08 February 2025  18:37:41 -0500 (0:00:01.491)       0:06:56.092 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the presence/absence of the LUKS device node] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.080)       0:06:56.172 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.090)       0:06:56.263 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Make sure we got info about the LUKS volume if encrypted] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.082)       0:06:56.345 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Verify the LUKS volume's device type if encrypted] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.060)       0:06:56.406 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present and storage_test_volume.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS version] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.054)       0:06:56.460 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:63
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.050)       0:06:56.511 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:75
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.051)       0:06:56.562 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:87
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.047)       0:06:56.610 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": [],
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Check for /etc/crypttab entry] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:93
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.067)       0:06:56.678 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Validate the format of the crypttab entry] *******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:100
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.085)       0:06:56.763 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check backing device of crypttab entry] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:108
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.093)       0:06:56.857 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Check key file of crypttab entry] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:116
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.070)       0:06:56.927 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_expected_crypttab_entries | int == 1",
    "skip_reason": "Conditional result was False"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:124
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.060)       0:06:56.988 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_expected_crypttab_entries": null,
        "_storage_test_expected_crypttab_key_file": null
    },
    "changed": false
}

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.049)       0:06:57.038 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.052)       0:06:57.090 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19
Saturday 08 February 2025  18:37:42 -0500 (0:00:00.046)       0:06:57.137 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.050)       0:06:57.188 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Set chunk size regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.067)       0:06:57.255 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.083)       0:06:57.339 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.075)       0:06:57.414 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.056)       0:06:57.471 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.077)       0:06:57.548 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.094)       0:06:57.642 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'raid'",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the actual size of the volume] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.082)       0:06:57.725 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested size of the volume] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.095)       0:06:57.820 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected size] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.112)       0:06:57.933 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28
Saturday 08 February 2025  18:37:43 -0500 (0:00:00.103)       0:06:58.037 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Get the size of parent/pool device] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32
Saturday 08 February 2025  18:37:44 -0500 (0:00:00.217)       0:06:58.254 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Show test pool] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46
Saturday 08 February 2025  18:37:44 -0500 (0:00:00.095)       0:06:58.350 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test blockinfo] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50
Saturday 08 February 2025  18:37:44 -0500 (0:00:00.156)       0:06:58.506 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Show test pool size] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54
Saturday 08 February 2025  18:37:44 -0500 (0:00:00.106)       0:06:58.613 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_test_volume_present | bool"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58
Saturday 08 February 2025  18:37:44 -0500 (0:00:00.135)       0:06:58.749 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Default thin pool reserved space values] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:67
Saturday 08 February 2025  18:37:44 -0500 (0:00:00.104)       0:06:58.853 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default minimal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:71
Saturday 08 February 2025  18:37:44 -0500 (0:00:00.100)       0:06:58.953 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Default maximal thin pool reserved space size] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:76
Saturday 08 February 2025  18:37:44 -0500 (0:00:00.137)       0:06:59.093 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate maximum usable space in thin pool] *****************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:82
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.134)       0:06:59.228 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply upper size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:86
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.094)       0:06:59.322 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Apply lower size limit to max usable thin pool space] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:91
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.085)       0:06:59.407 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Convert maximum usable thin pool space from int to Size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:96
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.080)       0:06:59.488 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show max thin pool size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:101
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.083)       0:06:59.571 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show volume thin pool size] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:105
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.077)       0:06:59.649 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Show test volume size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:109
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.080)       0:06:59.730 ***** 
skipping: [managed-node3] => {
    "false_condition": "storage_test_volume.thin"
}

TASK [Establish base value for expected thin pool size] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:113
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.075)       0:06:59.806 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected size based on pool size and percentage value] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:120
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.087)       0:06:59.893 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Establish base value for expected thin pool volume size] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:127
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.081)       0:06:59.975 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Calculate the expected thin pool volume size based on percentage value] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:131
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.082)       0:07:00.057 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Replace expected volume size with calculated value] **********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:137
Saturday 08 February 2025  18:37:45 -0500 (0:00:00.078)       0:07:00.135 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.thin",
    "skip_reason": "Conditional result was False"
}

TASK [Show actual size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:143
Saturday 08 February 2025  18:37:46 -0500 (0:00:00.125)       0:07:00.261 ***** 
ok: [managed-node3] => {
    "storage_test_actual_size": {
        "changed": false,
        "false_condition": "_storage_test_volume_present | bool",
        "skip_reason": "Conditional result was False",
        "skipped": true
    }
}

TASK [Show expected size] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:147
Saturday 08 February 2025  18:37:46 -0500 (0:00:00.103)       0:07:00.364 ***** 
ok: [managed-node3] => {
    "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined"
}

TASK [Assert expected size is actual size] *************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:151
Saturday 08 February 2025  18:37:46 -0500 (0:00:00.119)       0:07:00.483 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "_storage_test_volume_present | bool",
    "skip_reason": "Conditional result was False"
}

TASK [Get information about the LV] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5
Saturday 08 February 2025  18:37:46 -0500 (0:00:00.119)       0:07:00.603 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV segment type] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13
Saturday 08 February 2025  18:37:46 -0500 (0:00:00.078)       0:07:00.681 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check segment type] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17
Saturday 08 February 2025  18:37:46 -0500 (0:00:00.086)       0:07:00.767 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set LV cache size] *******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24
Saturday 08 February 2025  18:37:46 -0500 (0:00:00.125)       0:07:00.892 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the requested cache size] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31
Saturday 08 February 2025  18:37:46 -0500 (0:00:00.074)       0:07:00.967 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected cache size] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37
Saturday 08 February 2025  18:37:46 -0500 (0:00:00.112)       0:07:01.079 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Check cache size] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42
Saturday 08 February 2025  18:37:47 -0500 (0:00:00.135)       0:07:01.215 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present",
    "skip_reason": "Conditional result was False"
}

TASK [Clean up facts] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25
Saturday 08 February 2025  18:37:47 -0500 (0:00:00.091)       0:07:01.307 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_volume_present": null
    },
    "changed": false
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:37:47 -0500 (0:00:00.088)       0:07:01.395 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:37:47 -0500 (0:00:00.067)       0:07:01.462 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Setup Tang server on localhost for testing] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:232
Saturday 08 February 2025  18:37:47 -0500 (0:00:00.079)       0:07:01.542 ***** 
included: fedora.linux_system_roles.nbde_server for managed-node3

TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6
Saturday 08 February 2025  18:37:47 -0500 (0:00:00.311)       0:07:01.854 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2
Saturday 08 February 2025  18:37:47 -0500 (0:00:00.109)       0:07:01.964 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__nbde_server_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10
Saturday 08 February 2025  18:37:47 -0500 (0:00:00.083)       0:07:02.047 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15
Saturday 08 February 2025  18:37:48 -0500 (0:00:00.448)       0:07:02.496 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__nbde_server_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:19
Saturday 08 February 2025  18:37:48 -0500 (0:00:00.053)       0:07:02.549 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__nbde_server_cachedir": "/var/cache/tang",
        "__nbde_server_group": "tang",
        "__nbde_server_keydir": "/var/db/tang",
        "__nbde_server_keygen": "/usr/libexec/tangd-keygen",
        "__nbde_server_packages": [
            "tang"
        ],
        "__nbde_server_services": [
            "tangd.socket"
        ],
        "__nbde_server_update": "/usr/libexec/tangd-update",
        "__nbde_server_user": "tang"
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/vars/default.yml"
    ],
    "changed": false
}

TASK [fedora.linux_system_roles.nbde_server : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:9
Saturday 08 February 2025  18:37:48 -0500 (0:00:00.076)       0:07:02.626 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml for managed-node3

TASK [fedora.linux_system_roles.nbde_server : Ensure tang is installed] ********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2
Saturday 08 February 2025  18:37:48 -0500 (0:00:00.225)       0:07:02.851 ***** 
changed: [managed-node3] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: tang-15-6.fc41.x86_64",
        "Installed: llhttp-9.2.1-2.fc41.x86_64"
    ]
}
lsrpackages: tang

TASK [fedora.linux_system_roles.nbde_server : Ensure keys are rotated] *********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:8
Saturday 08 February 2025  18:37:51 -0500 (0:00:02.607)       0:07:05.459 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "nbde_server_rotate_keys | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.nbde_server : Ensure we have keys] *************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:17
Saturday 08 February 2025  18:37:51 -0500 (0:00:00.115)       0:07:05.574 ***** 
changed: [managed-node3] => {
    "arguments": {
        "cachedir": "/var/cache/tang",
        "force": false,
        "keydir": "/var/db/tang",
        "keygen": "/usr/libexec/tangd-keygen",
        "keys_to_deploy_dir": null,
        "state": "keys-created",
        "update": "/usr/libexec/tangd-update"
    },
    "changed": true,
    "state": "keys-created"
}

TASK [fedora.linux_system_roles.nbde_server : Perform key management (fetch/deploy) tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:26
Saturday 08 February 2025  18:37:52 -0500 (0:00:00.727)       0:07:06.301 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "(nbde_server_fetch_keys | bool) or (nbde_server_deploy_keys | bool)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.nbde_server : Manage firewall and SELinux for port] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:30
Saturday 08 February 2025  18:37:52 -0500 (0:00:00.093)       0:07:06.394 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml for managed-node3

TASK [Ensure tang port is labeled tangd_port_t for SELinux] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:2
Saturday 08 February 2025  18:37:52 -0500 (0:00:00.105)       0:07:06.500 ***** 
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean
included: fedora.linux_system_roles.selinux for managed-node3

TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2
Saturday 08 February 2025  18:37:52 -0500 (0:00:00.213)       0:07:06.713 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node3

TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2
Saturday 08 February 2025  18:37:52 -0500 (0:00:00.165)       0:07:06.879 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] *************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7
Saturday 08 February 2025  18:37:52 -0500 (0:00:00.154)       0:07:07.034 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node3

TASK [fedora.linux_system_roles.selinux : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5
Saturday 08 February 2025  18:37:53 -0500 (0:00:00.153)       0:07:07.187 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10
Saturday 08 February 2025  18:37:53 -0500 (0:00:00.501)       0:07:07.689 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__selinux_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17
Saturday 08 February 2025  18:37:53 -0500 (0:00:00.087)       0:07:07.776 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22
Saturday 08 February 2025  18:37:54 -0500 (0:00:00.528)       0:07:08.305 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__selinux_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26
Saturday 08 February 2025  18:37:54 -0500 (0:00:00.099)       0:07:08.404 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "ansible_python_version is version('3', '<')",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
Saturday 08 February 2025  18:37:54 -0500 (0:00:00.098)       0:07:08.503 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: python3-libselinux python3-policycoreutils

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46
Saturday 08 February 2025  18:37:55 -0500 (0:00:01.566)       0:07:10.069 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "ansible_os_family == \"Suse\"",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
Saturday 08 February 2025  18:37:55 -0500 (0:00:00.092)       0:07:10.162 ***** 
changed: [managed-node3] => {
    "changed": true,
    "rc": 0,
    "results": [
        "Installed: policycoreutils-python-utils-3.7-7.fc41.noarch"
    ]
}
lsrpackages: policycoreutils-python-utils

TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72
Saturday 08 February 2025  18:37:57 -0500 (0:00:01.807)       0:07:11.969 ***** 
skipping: [managed-node3] => {
    "false_condition": "__selinux_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77
Saturday 08 February 2025  18:37:57 -0500 (0:00:00.105)       0:07:12.075 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__selinux_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82
Saturday 08 February 2025  18:37:58 -0500 (0:00:00.098)       0:07:12.174 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__selinux_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Refresh facts] ***********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89
Saturday 08 February 2025  18:37:58 -0500 (0:00:00.098)       0:07:12.272 ***** 
ok: [managed-node3]

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5
Saturday 08 February 2025  18:37:59 -0500 (0:00:01.060)       0:07:13.333 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13
Saturday 08 February 2025  18:37:59 -0500 (0:00:00.139)       0:07:13.473 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] *********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21
Saturday 08 February 2025  18:37:59 -0500 (0:00:00.109)       0:07:13.582 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "selinux_reboot_required": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] **********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25
Saturday 08 February 2025  18:37:59 -0500 (0:00:00.197)       0:07:13.779 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "selinux_reboot_required",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] *********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30
Saturday 08 February 2025  18:37:59 -0500 (0:00:00.106)       0:07:13.885 ***** 
skipping: [managed-node3] => {
    "false_condition": "ansible_selinux.status == \"disabled\""
}

TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35
Saturday 08 February 2025  18:37:59 -0500 (0:00:00.174)       0:07:14.059 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "selinux_all_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43
Saturday 08 February 2025  18:38:00 -0500 (0:00:00.140)       0:07:14.199 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "selinux_booleans_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48
Saturday 08 February 2025  18:38:00 -0500 (0:00:00.137)       0:07:14.337 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "selinux_fcontexts_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53
Saturday 08 February 2025  18:38:00 -0500 (0:00:00.176)       0:07:14.513 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "selinux_ports_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58
Saturday 08 February 2025  18:38:00 -0500 (0:00:00.139)       0:07:14.653 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "selinux_logins_purge | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63
Saturday 08 February 2025  18:38:00 -0500 (0:00:00.152)       0:07:14.805 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74
Saturday 08 February 2025  18:38:00 -0500 (0:00:00.121)       0:07:14.926 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87
Saturday 08 February 2025  18:38:00 -0500 (0:00:00.097)       0:07:15.023 ***** 
changed: [managed-node3] => (item={'ports': 7500, 'proto': 'tcp', 'setype': 'tangd_port_t', 'state': 'present', 'local': True}) => {
    "__selinux_item": {
        "local": true,
        "ports": 7500,
        "proto": "tcp",
        "setype": "tangd_port_t",
        "state": "present"
    },
    "ansible_loop_var": "__selinux_item",
    "changed": true,
    "ports": [
        "7500"
    ],
    "proto": "tcp",
    "setype": "tangd_port_t",
    "state": "present"
}

TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99
Saturday 08 February 2025  18:38:03 -0500 (0:00:02.842)       0:07:17.866 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
Saturday 08 February 2025  18:38:03 -0500 (0:00:00.066)       0:07:17.933 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "selinux_checksums": true,
        "selinux_installed_modules": {
            "abrt": {
                "100": {
                    "checksum": "sha256:6e571059441d252640e3509cf407cd3a570cb9a5c5d37aa0d05bd8d193b32260",
                    "enabled": 1
                }
            },
            "accountsd": {
                "100": {
                    "checksum": "sha256:ff868726dfb38a330824ab14a3d9b09e9e8aa89ddc8ac6b22abaa1bc62149ffa",
                    "enabled": 1
                }
            },
            "acct": {
                "100": {
                    "checksum": "sha256:0b4d3710b7c16f7149311eabf067807bd9e57305ab518642cd94544430245416",
                    "enabled": 1
                }
            },
            "afs": {
                "100": {
                    "checksum": "sha256:d6320654e5962a15935e9bcdfd8eceba64deb7051fcf6592a4ce023c1ed00798",
                    "enabled": 1
                }
            },
            "afterburn": {
                "100": {
                    "checksum": "sha256:5df07939f0bd5c566bb8920f6dc3aa9a58a7b4af495b81d99a1bfdb468788471",
                    "enabled": 1
                }
            },
            "aide": {
                "100": {
                    "checksum": "sha256:4f01eeb1c4bb8b68d4c620c2579434e124da3c7113233b3e715d503010083281",
                    "enabled": 1
                }
            },
            "alsa": {
                "100": {
                    "checksum": "sha256:75ca3e6e6bfc1efcaf68401f9c1c256dcd07c27d1a4613b00da34897c3db39d3",
                    "enabled": 1
                }
            },
            "amanda": {
                "100": {
                    "checksum": "sha256:da1163091d32a68010421f1b60befb29aef78c9f2046cc7c330d276b0885dffe",
                    "enabled": 1
                }
            },
            "anaconda": {
                "100": {
                    "checksum": "sha256:61f367207b0ed849440500208623a265df726ae733e1df5fb454d04330442bc6",
                    "enabled": 1
                }
            },
            "antivirus": {
                "100": {
                    "checksum": "sha256:718557aa9547e4a7ce0c7e98fa8aab22bfbd1ccb7da591f47e16faf6f00769e2",
                    "enabled": 1
                }
            },
            "apache": {
                "100": {
                    "checksum": "sha256:e17f44d776d2385cf4d60a96a4ca3aaf7adda77e796c045d7b16e5974f401bd3",
                    "enabled": 1
                }
            },
            "apcupsd": {
                "100": {
                    "checksum": "sha256:908586a0da0d53005a23288459fb3fbfe5d9cfd42facd314da228088dc2e9ea7",
                    "enabled": 1
                }
            },
            "apm": {
                "100": {
                    "checksum": "sha256:28e25bbe215427a27e62e2ed41752101fd47d471c473ce3eac9c5de7f5871835",
                    "enabled": 1
                }
            },
            "application": {
                "100": {
                    "checksum": "sha256:3a91f2a5b7473a60ba38440265946e298936584c7954ac53b9a63926557d3d0d",
                    "enabled": 1
                }
            },
            "arpwatch": {
                "100": {
                    "checksum": "sha256:e805313c73b62c5e31b5629e698bd0131a1c0d44b35950e3043a297454912567",
                    "enabled": 1
                }
            },
            "asterisk": {
                "100": {
                    "checksum": "sha256:f31b744afc0404d748b1675b297dcda43a313e09a9caa7a91a0160157e328a39",
                    "enabled": 1
                }
            },
            "auditadm": {
                "100": {
                    "checksum": "sha256:749a0dd1d66233d92e9ab33319d12ce45a91593e1dd1bc9b9fae330001edfd38",
                    "enabled": 1
                }
            },
            "authlogin": {
                "100": {
                    "checksum": "sha256:58f186fca49e5ac2b42b380e2c71b96a4415776d7c6ab514001a17733a37d8a3",
                    "enabled": 1
                }
            },
            "automount": {
                "100": {
                    "checksum": "sha256:13207d2c4fbbef0d912ee3833a40cbf6661f6133f88c03997e92e137bea7cab6",
                    "enabled": 1
                }
            },
            "avahi": {
                "100": {
                    "checksum": "sha256:b6bfc2b90536b9266f17999303e648cd1abc6149f8798a2fe725f965e6a17fed",
                    "enabled": 1
                }
            },
            "awstats": {
                "100": {
                    "checksum": "sha256:a4b488ebf08441fe45443272f888be792feac13cf2217b472d23c19de7613200",
                    "enabled": 1
                }
            },
            "bacula": {
                "100": {
                    "checksum": "sha256:65725493fe6f4b5e2c7ba2ff09067c9efe94d436c9322783bd192c8acc9f78e5",
                    "enabled": 1
                }
            },
            "base": {
                "100": {
                    "checksum": "sha256:cb3e44061aac6579b1c95e18a2d9f0fb7bba45c12a1a1c512f8e9388e414ac62",
                    "enabled": 1
                }
            },
            "bind": {
                "100": {
                    "checksum": "sha256:c8cdd40745296910be784d2d00fab1b0c52a117ec0ca8d1d771c3ed051de5759",
                    "enabled": 1
                }
            },
            "bitlbee": {
                "100": {
                    "checksum": "sha256:4a4fe31eac32c89784a6bee3317c2c48b1848071daea27de74fecfd345313ecd",
                    "enabled": 1
                }
            },
            "blkmapd": {
                "100": {
                    "checksum": "sha256:fee85c496f6e317fa6c79e01cf7a957ceba3d7d9a8129bbf08d31b468e97708c",
                    "enabled": 1
                }
            },
            "blueman": {
                "100": {
                    "checksum": "sha256:2150b8a1afae75f4d73c76e2982ccc575f599577b8b7042d7bfa87fe2fed3a30",
                    "enabled": 1
                }
            },
            "bluetooth": {
                "100": {
                    "checksum": "sha256:8f0b86f719d0ecd073fbac5187db43d7c398f73936fecd15c44da436108e5311",
                    "enabled": 1
                }
            },
            "boinc": {
                "100": {
                    "checksum": "sha256:8435cc1276aaf80e982eb0b1149f212cb6fc1366f29e563724caf890679f2542",
                    "enabled": 1
                }
            },
            "boltd": {
                "100": {
                    "checksum": "sha256:e22f75dc547d85b02fdd0f2d4f20f69a1ca9b969ed6f9b9abf957fa02e6e1b7f",
                    "enabled": 1
                }
            },
            "boothd": {
                "100": {
                    "checksum": "sha256:bdfc7034a291d2afbe03437398e9743fc187e77c4d208ae0641f740a6dc1cfa0",
                    "enabled": 1
                }
            },
            "bootloader": {
                "100": {
                    "checksum": "sha256:ccfc94eaa4a6062dcd0996078e61e46eb550898c287d7596a85ad6f857fa69d5",
                    "enabled": 1
                }
            },
            "bootupd": {
                "100": {
                    "checksum": "sha256:6916a4cc81d388e328276fe10ad4b7d0694ab594eb0d199793148d7313235f74",
                    "enabled": 1
                }
            },
            "brctl": {
                "100": {
                    "checksum": "sha256:5165683fab4b880d741376a81061b1a467b88fcaba92225a50e46322ae74e440",
                    "enabled": 1
                }
            },
            "brltty": {
                "100": {
                    "checksum": "sha256:0230419d38276d2c823a427d44fa1362f89784c7ac17f76826be95d3c8e6de1b",
                    "enabled": 1
                }
            },
            "bugzilla": {
                "100": {
                    "checksum": "sha256:8e4fd93cbe0357fc894c7898adf9a14f8c845d87a08f673f410b702f798c2383",
                    "enabled": 1
                }
            },
            "cachefilesd": {
                "100": {
                    "checksum": "sha256:980e9a260a2fb444a12629d8621407f4572e4de7cf8943b13161b6ed10375826",
                    "enabled": 1
                }
            },
            "calamaris": {
                "100": {
                    "checksum": "sha256:4c65dae6d30fea1bebc4e63ff693b2041f18d0adb710b783002c98c529be84d8",
                    "enabled": 1
                }
            },
            "callweaver": {
                "100": {
                    "checksum": "sha256:a968d9dc972867e4e6249ae22f9dbf4266b74a7d142cae17a57a94342e1d2071",
                    "enabled": 1
                }
            },
            "canna": {
                "100": {
                    "checksum": "sha256:0602763cc3990087785d24c7cc430aefcdf02bdcfe24a7200d3edc2949d1ec6b",
                    "enabled": 1
                }
            },
            "ccs": {
                "100": {
                    "checksum": "sha256:5651ec2c95f0b2d959b59c50ae021bbb6bc15f33331132110a9836b2278144de",
                    "enabled": 1
                }
            },
            "cdrecord": {
                "100": {
                    "checksum": "sha256:ba7fd7c8a2a4d0fc658dd4aeb27b1c9c2049e7b8b5b5c3afa9523c74f6aae263",
                    "enabled": 1
                }
            },
            "certmaster": {
                "100": {
                    "checksum": "sha256:f9a4c97defbe94abd597286e65015baa5cdbc494a6404f3e467ba2484fd753d8",
                    "enabled": 1
                }
            },
            "certmonger": {
                "100": {
                    "checksum": "sha256:666501337062cf4014f547858f4a5f4d4ca875f1c41a57ca725630161d61e201",
                    "enabled": 1
                }
            },
            "certwatch": {
                "100": {
                    "checksum": "sha256:c875a519672af0d8291b9097f547dc940b9f0132a174d49cb4a332e08fe92b89",
                    "enabled": 1
                }
            },
            "cfengine": {
                "100": {
                    "checksum": "sha256:c1347bcf8c612aae6327f684b7f27ba268172bfc3dc7b027567ac4cd1512dce9",
                    "enabled": 1
                }
            },
            "cgroup": {
                "100": {
                    "checksum": "sha256:9e9eaeee64d116248c2be62574004ab15354e00ddbbc86aab0a174425ac82584",
                    "enabled": 1
                }
            },
            "chrome": {
                "100": {
                    "checksum": "sha256:0c67ea3b31cf2b67765839cccf9429c8081a7586a9bc435c1c7435cd8824be85",
                    "enabled": 1
                }
            },
            "chronyd": {
                "100": {
                    "checksum": "sha256:cab82ed9275123404e2017bd295fb3820d404c25d923bda2ef76fb91a6314348",
                    "enabled": 1
                }
            },
            "cifsutils": {
                "100": {
                    "checksum": "sha256:3a14853f222d5d4a41a72b479e21799b200ac7230e33426c296530856fe89977",
                    "enabled": 1
                }
            },
            "cinder": {
                "100": {
                    "checksum": "sha256:1bca5805677f6de8372027eab3e7b091d28d9098bf5c363090b12f4195051441",
                    "enabled": 1
                }
            },
            "cipe": {
                "100": {
                    "checksum": "sha256:a95198f88b030851c95848e09f8a0daf2f610c9d517f0ec06c397c957c1bb8f4",
                    "enabled": 1
                }
            },
            "clock": {
                "100": {
                    "checksum": "sha256:7ef3929ab6674291f68cf898f5177a42ae8b0a2cecbafe4b3b0858373058d550",
                    "enabled": 1
                }
            },
            "clogd": {
                "100": {
                    "checksum": "sha256:66e6d2d2bcf99ecfab229a5123becc308308446aaeb5dde00918429e73c6f88f",
                    "enabled": 1
                }
            },
            "cloudform": {
                "100": {
                    "checksum": "sha256:327cc94b073e22e3583d2f3b0fff723e644a6a67af8c3182f23ac93df5c23a95",
                    "enabled": 1
                }
            },
            "cmirrord": {
                "100": {
                    "checksum": "sha256:963b10f139af4312c18f4d47d01fffe051af8205c729f2f2bf2ea4c13810dc98",
                    "enabled": 1
                }
            },
            "cobbler": {
                "100": {
                    "checksum": "sha256:b63b648a2de86b24fae021093613a68861ed439474698ce39a75bc6f5ac376e5",
                    "enabled": 1
                }
            },
            "collectd": {
                "100": {
                    "checksum": "sha256:0d1a8532ecb37791ca76bc6f6e6bae5d02e33e705bb3d99b98ff124ef7bec460",
                    "enabled": 1
                }
            },
            "colord": {
                "100": {
                    "checksum": "sha256:001865716275034253ef64bd80c12db293b0c0e7132432ffc7f1cc38e156cd16",
                    "enabled": 1
                }
            },
            "comsat": {
                "100": {
                    "checksum": "sha256:8bbefb9d81d1e91ab0625144be2d9193cc9ef5ce7224ed1550d44b860deadcbd",
                    "enabled": 1
                }
            },
            "condor": {
                "100": {
                    "checksum": "sha256:3ddbb9f63e9d55225656cbb9092e5dcc1b453359707cdd73e67ef06a6aa7df9c",
                    "enabled": 1
                }
            },
            "conman": {
                "100": {
                    "checksum": "sha256:714f07b0412727d60cc1b2958dee240b59651229f1859639e09bbd714cf139ad",
                    "enabled": 1
                }
            },
            "conntrackd": {
                "100": {
                    "checksum": "sha256:e8bedb5713c0d4491d4b2a38494e8a00b6735ab4ddc40aae7dacd15f46d597a4",
                    "enabled": 1
                }
            },
            "consolekit": {
                "100": {
                    "checksum": "sha256:f0042a254008662140e60ecb5261c1ad48119774dfef32ec9ef17f1579e5bac2",
                    "enabled": 1
                }
            },
            "coreos_installer": {
                "100": {
                    "checksum": "sha256:30eb3fa480b368dcbd10cbde31fb4e129513f4fed1c1eb16a7bcc364c4b1e801",
                    "enabled": 1
                }
            },
            "couchdb": {
                "100": {
                    "checksum": "sha256:743992902ac8514de4dd54b16317a8edb9cb1e383e29d0450821a9ac304059db",
                    "enabled": 1
                }
            },
            "courier": {
                "100": {
                    "checksum": "sha256:0db5bd27caaa72a6f1aa2e3cc4138bda450a6c415e5c1fbb6328a8a39b6bcdef",
                    "enabled": 1
                }
            },
            "cpucontrol": {
                "100": {
                    "checksum": "sha256:4569f4ebfcaa55a45db1a7294ec898f3507eadaf052585b2688be8da3df8f9d1",
                    "enabled": 1
                }
            },
            "cpufreqselector": {
                "100": {
                    "checksum": "sha256:0b0bd5618356e7da7018c86dc262758bb6f04e7618de2942995ad59e897c3244",
                    "enabled": 1
                }
            },
            "cpuplug": {
                "100": {
                    "checksum": "sha256:13533345c2b7386622a2887f42ebbda604f36d2f8362f71a85a95e1cca9aa6c6",
                    "enabled": 1
                }
            },
            "cron": {
                "100": {
                    "checksum": "sha256:59373dee911745c788c719f0bbdf26fcddfedbb5e4b23ba87605bb0d3584abf1",
                    "enabled": 1
                }
            },
            "ctdb": {
                "100": {
                    "checksum": "sha256:f54e35560a64de96a3cfc2b7cb63e680d6778225ae9ead9ad478b2678f88b6d8",
                    "enabled": 1
                }
            },
            "cups": {
                "100": {
                    "checksum": "sha256:871725456b957e8a2d11b156f51231de75a7fad129f418f901fa740814817147",
                    "enabled": 1
                }
            },
            "cvs": {
                "100": {
                    "checksum": "sha256:b188990c62c036cf96deb2fd121236499dcb5cff830e38ac24b74d4692c0323d",
                    "enabled": 1
                }
            },
            "cyphesis": {
                "100": {
                    "checksum": "sha256:ffe9e6e4bb673f6822a874b86eb21769ef0f5fdbfd2e2659485abc4c6215ffa2",
                    "enabled": 1
                }
            },
            "cyrus": {
                "100": {
                    "checksum": "sha256:8b077e1368da1daaa96272b0d6f22f08bcb264617e6884c9074423a9b066912d",
                    "enabled": 1
                }
            },
            "daemontools": {
                "100": {
                    "checksum": "sha256:c9631ff4c56b588338bd87c7d8cba95e78285ecfc4f12653ea5c888d2fee8af0",
                    "enabled": 1
                }
            },
            "dbadm": {
                "100": {
                    "checksum": "sha256:f9cd1d1c850a023d882c130cfad0d17c3d5af781610f6e20dce354e3ac9b03f9",
                    "enabled": 1
                }
            },
            "dbskk": {
                "100": {
                    "checksum": "sha256:b0eaf034b7f124c6ebb302f98543c5fb3c2191b1c584e73b994468d9b8771959",
                    "enabled": 1
                }
            },
            "dbus": {
                "100": {
                    "checksum": "sha256:9f9c5f5ba68fdf9160d7cc88d4c5e82072094cc04348d802bb30771d233374f7",
                    "enabled": 1
                }
            },
            "dcc": {
                "100": {
                    "checksum": "sha256:c6d2bd4c0a753011b65e4b41f82aa48fe811645a474e546e8cc2291f7c7a49eb",
                    "enabled": 1
                }
            },
            "ddclient": {
                "100": {
                    "checksum": "sha256:7d2c4be9226c27c500f8a96188b2864ad5899f206296059ab8de4d3f162ec09a",
                    "enabled": 1
                }
            },
            "denyhosts": {
                "100": {
                    "checksum": "sha256:7a239615a4db634ae1ae8564acc54c55c86c68bd2e44d47ad9c24758d492b22c",
                    "enabled": 1
                }
            },
            "devicekit": {
                "100": {
                    "checksum": "sha256:1aaf1972825b0e1cba355be471b2a866e39374b5919b0b6c9796fa406a769be2",
                    "enabled": 1
                }
            },
            "dhcp": {
                "100": {
                    "checksum": "sha256:0871b8968f053316c9615743b4970c27c2c0f78a61fe8716c24f699703c58c1a",
                    "enabled": 1
                }
            },
            "dictd": {
                "100": {
                    "checksum": "sha256:00bea61c43d18e45979bd3481b0c18f6cddb38a411a4f19ae7e6120d7555455f",
                    "enabled": 1
                }
            },
            "dirsrv": {
                "100": {
                    "checksum": "sha256:b38f878e9af27bcff6ae8993c8a217321dd1fc6c62334090addfb14aebba5c95",
                    "enabled": 1
                }
            },
            "distcc": {
                "100": {
                    "checksum": "sha256:a9e6800d2e2993c1f318a8737647f2e450d41f692c11b09f6bdffac7fc556c23",
                    "enabled": 1
                }
            },
            "dmesg": {
                "100": {
                    "checksum": "sha256:433fd6e718c9ae9285c35aff260d62aac813fe8bd414e5986ab1712e888aaf49",
                    "enabled": 1
                }
            },
            "dmidecode": {
                "100": {
                    "checksum": "sha256:2d65a17644f1edf61eb31c2c66d1268511fba016ff9a7758b2bb09cdf98d2706",
                    "enabled": 1
                }
            },
            "dnsmasq": {
                "100": {
                    "checksum": "sha256:a1dae79a0a2d2996882a55085cd82b8b5a39b5b78f91f5f016506878b319eecb",
                    "enabled": 1
                }
            },
            "dnssec": {
                "100": {
                    "checksum": "sha256:126692a2df2eb7609f7e3e4c2c930ee13d2431a92caadef2747c960d0d65f457",
                    "enabled": 1
                }
            },
            "dovecot": {
                "100": {
                    "checksum": "sha256:cdf84ac91260f7cacec107a1a7a8eb750b17fb10b7778b07147f2583f791e26a",
                    "enabled": 1
                }
            },
            "drbd": {
                "100": {
                    "checksum": "sha256:9e478ef621ae90160e3d121cb167286e9737fd213582accd366424eda11ca747",
                    "enabled": 1
                }
            },
            "dspam": {
                "100": {
                    "checksum": "sha256:90cdf514efbb5bf01f1e2b90d7d58eed2e2b458242905b5cfa32e1de01d71da3",
                    "enabled": 1
                }
            },
            "entropyd": {
                "100": {
                    "checksum": "sha256:705fd821fa0f0ba75a507e8363f78787f84c29f8c2d3a44fa243cd05d2c8f8ee",
                    "enabled": 1
                }
            },
            "exim": {
                "100": {
                    "checksum": "sha256:05b1fb70ea49e4e9cea9896284412cff82be88269d3e95abcb4daa61136c4da6",
                    "enabled": 1
                }
            },
            "fcoe": {
                "100": {
                    "checksum": "sha256:33cab79d9b81dad37457565590370531db36d0e4a5b2ef73cd52a33b37816956",
                    "enabled": 1
                }
            },
            "fdo": {
                "100": {
                    "checksum": "sha256:6b9661cfe29bccc779264f230ded766a207ef3937aec125e5cc9a55dea4b1e8f",
                    "enabled": 1
                }
            },
            "fedoratp": {
                "100": {
                    "checksum": "sha256:d6f5f286ed1d5705e62598f143c0150deb59d708d7dd1ad89bdc6d68f59ed853",
                    "enabled": 1
                }
            },
            "fetchmail": {
                "100": {
                    "checksum": "sha256:3acaf8ff26c8d81010cc1ccf88872df39199279fa44d85b06903c97f50def08c",
                    "enabled": 1
                }
            },
            "finger": {
                "100": {
                    "checksum": "sha256:749fc5a3e4ba8771a75dc712f7795a929e147cafa8dc04485c631aad0a37da5f",
                    "enabled": 1
                }
            },
            "firewalld": {
                "100": {
                    "checksum": "sha256:bc1d1373b9a03d7adf44fcad82b369d1ebdd0f2e003cbf618411af40c1ac94d9",
                    "enabled": 1
                }
            },
            "firewallgui": {
                "100": {
                    "checksum": "sha256:30d11ebcc3525e38c2a1532661404be432cbf6d4f0f4fb67c51fd9f32c3b7b92",
                    "enabled": 1
                }
            },
            "firstboot": {
                "100": {
                    "checksum": "sha256:d0521b45eebcd324ec966781f0d10ef06791c68b7e53b8272775bfd953a3fa08",
                    "enabled": 1
                }
            },
            "fprintd": {
                "100": {
                    "checksum": "sha256:ff74bb68f3369f30d2ae98cfce6acecbd6b7adbad46a18cc03bba25037a26beb",
                    "enabled": 1
                }
            },
            "freeipmi": {
                "100": {
                    "checksum": "sha256:cc140de8fcb92cb588d19273d8f67619b5fcbef1a114ed805621a0cd11e4ebc3",
                    "enabled": 1
                }
            },
            "freqset": {
                "100": {
                    "checksum": "sha256:3f2c6b1336ed28a0a7d90f283e9289a5f83e5ceb71d11b7814fdc65ae529314a",
                    "enabled": 1
                }
            },
            "fstools": {
                "100": {
                    "checksum": "sha256:66707b09fc0bf73662040a6d5ed43e5ddc851704c29a129f7a4d0e223ea10f63",
                    "enabled": 1
                }
            },
            "ftp": {
                "100": {
                    "checksum": "sha256:d281a5a1ad47bac3ef7854fa5792f08f9e70051eb6d577de2e66a4916809474e",
                    "enabled": 1
                }
            },
            "fwupd": {
                "100": {
                    "checksum": "sha256:3da67a17b85cde7a0f181e6599726357fb2eeeb251fdd980a9d6669772415922",
                    "enabled": 1
                }
            },
            "games": {
                "100": {
                    "checksum": "sha256:020d175da904ce5e5f94eaab9e60a4a86b70ed9767f7660157c7fa459c747b34",
                    "enabled": 1
                }
            },
            "gdomap": {
                "100": {
                    "checksum": "sha256:f41fda08cf3bd33ac571a4ca5d6d505a6dc7062c29e45b1d5d160c953c532230",
                    "enabled": 1
                }
            },
            "geoclue": {
                "100": {
                    "checksum": "sha256:beec56305b71ccb0174a7c790eb972244d10b8381a84812f01021b9e8eb7ad99",
                    "enabled": 1
                }
            },
            "getty": {
                "100": {
                    "checksum": "sha256:5890afc9d363e3af10fbc90898f6ba211ae65b08b58d2819208829da1a0aa1d9",
                    "enabled": 1
                }
            },
            "git": {
                "100": {
                    "checksum": "sha256:9f2c83dc9cafe17c8d650f8c78c39c61bd3d2c8b4d77a535eb61ce6a04a3e3f4",
                    "enabled": 1
                }
            },
            "gitosis": {
                "100": {
                    "checksum": "sha256:b7decc3fbc86e4b9e3fe1fec22021737ad607cf795224b5d5793069716a7eda8",
                    "enabled": 1
                }
            },
            "glance": {
                "100": {
                    "checksum": "sha256:097520793eaae6b878f14f2b767d40a80007786065abf1b252d898a685957fc5",
                    "enabled": 1
                }
            },
            "glusterd": {
                "100": {
                    "checksum": "sha256:c6e3a1456d1166b4a1557097ac611d03fbc418da3ebf1c4a8ce51a2281731ee9",
                    "enabled": 1
                }
            },
            "gnome": {
                "100": {
                    "checksum": "sha256:36acfed6d94c2bc1125d70025b8329db893614e225dc0e56d84b3b7ed69f0003",
                    "enabled": 1
                }
            },
            "gnome_remote_desktop": {
                "100": {
                    "checksum": "sha256:0f80ef380993963d0da8d29657d92499608e5bca57ac14dd40bc0133bfb03413",
                    "enabled": 1
                }
            },
            "gpg": {
                "100": {
                    "checksum": "sha256:652eb4dd8e376e48b1389de31b94f634f35dd2d1e9ac0da4dbbb1334df750af2",
                    "enabled": 1
                }
            },
            "gpm": {
                "100": {
                    "checksum": "sha256:8187fd4af8e847bf5febdbaf54f165788fd16dae5189b66d683dec60596b1172",
                    "enabled": 1
                }
            },
            "gpsd": {
                "100": {
                    "checksum": "sha256:f610bf0be7884be69e68856baa7450c4a00853d0ceb96c79e0e884fb2ee942a1",
                    "enabled": 1
                }
            },
            "gssproxy": {
                "100": {
                    "checksum": "sha256:97641e46c6076b5179127c6932ce33b1f8d31e70d78a7d79c7efcc3493ef3370",
                    "enabled": 1
                }
            },
            "guest": {
                "100": {
                    "checksum": "sha256:14d0ea70e0abbd99618ceccb777aaf0fc917f6de4bad7bcf15f05cb2eac28085",
                    "enabled": 1
                }
            },
            "hddtemp": {
                "100": {
                    "checksum": "sha256:d57cb86ca4b40fd974e5c4e1288ca5b5dfe69f5794240be1d75f153014580d24",
                    "enabled": 1
                }
            },
            "hostapd": {
                "100": {
                    "checksum": "sha256:57336b59a92ab85d730e65bb73d6fdac377dc062acbdc1e9e380a9e70b7d816f",
                    "enabled": 1
                }
            },
            "hostname": {
                "100": {
                    "checksum": "sha256:2456eab7918420953167d81692c853d42d3f91d75c9b51b6cb1546a44f1493fc",
                    "enabled": 1
                }
            },
            "hsqldb": {
                "100": {
                    "checksum": "sha256:fb70fffdeca1376649a7482969485ff476064fa0d81278c618f06bb62b9afa77",
                    "enabled": 1
                }
            },
            "hwloc": {
                "100": {
                    "checksum": "sha256:5de860596f6e5bee1d1d4151efbb27f239ac83ec4a53dc3183073b123d321415",
                    "enabled": 1
                }
            },
            "hypervkvp": {
                "100": {
                    "checksum": "sha256:e4bc151609f29e17034f474544284d19c570feb75b8926bb09f6081cc27173f8",
                    "enabled": 1
                }
            },
            "ibacm": {
                "100": {
                    "checksum": "sha256:6ec50390737f84638d0ad98e77715a09857fc999aab7cf30835c1ccfe7a909dc",
                    "enabled": 1
                }
            },
            "ica": {
                "100": {
                    "checksum": "sha256:bd8eccb4266130eddfae34318855e6aa3a8624998f0a906a970a04802510ad86",
                    "enabled": 1
                }
            },
            "icecast": {
                "100": {
                    "checksum": "sha256:3199646b83c13466308cbe243d6c75f828875ab0dfca1deb585dcd7bfb28ae60",
                    "enabled": 1
                }
            },
            "iiosensorproxy": {
                "100": {
                    "checksum": "sha256:2f70fe3a940a9fa12e99b838d1b0eabf9a5ee073cc35626a07c0576f0497ab64",
                    "enabled": 1
                }
            },
            "inetd": {
                "100": {
                    "checksum": "sha256:bbea7bd0c76693df24eefcb14bb3389efed0f4e9f96353470f9ebd5f014f0845",
                    "enabled": 1
                }
            },
            "init": {
                "100": {
                    "checksum": "sha256:20afd0d8466ac4b1b85683bd202d7a14f1938fe6bc8b8aa41a41566c64d64f10",
                    "enabled": 1
                }
            },
            "inn": {
                "100": {
                    "checksum": "sha256:139302f060755bbcfd0fb94d40854ad71a35b9318dd44c8be69f2c6d114f0e3e",
                    "enabled": 1
                }
            },
            "insights_client": {
                "100": {
                    "checksum": "sha256:75ac9cf6992502e3302f84ec39a6e7f1970efc13752181753a975d05c9e0e91c",
                    "enabled": 1
                }
            },
            "iodine": {
                "100": {
                    "checksum": "sha256:6aa1d18652a868ff83c45fd9393499416d978fd6d5d6dc8663f085335db48a0e",
                    "enabled": 1
                }
            },
            "iotop": {
                "100": {
                    "checksum": "sha256:102db291cb647f60032c139d0b5dcdeccdbe41c74f5c9fca6db560c17b61412d",
                    "enabled": 1
                }
            },
            "ipmievd": {
                "100": {
                    "checksum": "sha256:59835fa42cdb6a94eedb9b4a264c6789084eff696b8efeecce8b48cd786a2dd6",
                    "enabled": 1
                }
            },
            "ipsec": {
                "100": {
                    "checksum": "sha256:24c89979739ae9c391acdaddc1d54325153085c1843626ec0f544def2b021a04",
                    "enabled": 1
                }
            },
            "iptables": {
                "100": {
                    "checksum": "sha256:fda81f1fd9848ff6358d82191ad02d672ad1ecc7ba5b76edb604c2e966eb6e4f",
                    "enabled": 1
                }
            },
            "irc": {
                "100": {
                    "checksum": "sha256:1ed937287398d60e302478f4880a25c1aef605a5d370c1f583426c59067d8ed5",
                    "enabled": 1
                }
            },
            "irqbalance": {
                "100": {
                    "checksum": "sha256:484bae0572ba408d7804477484a497964dfc9e2b7cd0e404418fa89947d42064",
                    "enabled": 1
                }
            },
            "iscsi": {
                "100": {
                    "checksum": "sha256:09019b87ded28376309ec057f0a3d57b753c52a47665834e3455b10c17d0b0f8",
                    "enabled": 1
                }
            },
            "isns": {
                "100": {
                    "checksum": "sha256:cdccb8cba84e7ae30ef624cd9d265ef4dbbfc7c96882e30c6815d7c1ed961464",
                    "enabled": 1
                }
            },
            "jabber": {
                "100": {
                    "checksum": "sha256:08288d791f1b1aa3a8f154ae7b2ff73d574560bf5bb82246020baa3c26f6c988",
                    "enabled": 1
                }
            },
            "jetty": {
                "100": {
                    "checksum": "sha256:c2dbeb224e0a9bfbae257131f0cf3f1582977b1e57855c244582fa107a92cd0d",
                    "enabled": 1
                }
            },
            "jockey": {
                "100": {
                    "checksum": "sha256:f35d7f2d61175ba7f908459f738c7d6b6adf2ae678d88531fa527c4d50b21985",
                    "enabled": 1
                }
            },
            "journalctl": {
                "100": {
                    "checksum": "sha256:b22c4353aeef37f46ea73e660efadb81b861070d84427755b19f0e2bed596354",
                    "enabled": 1
                }
            },
            "kafs": {
                "100": {
                    "checksum": "sha256:5a35385072b421cdb86fc66190dc00e6c8e7a587a60ffeedfe2a931b6b92a11d",
                    "enabled": 1
                }
            },
            "kdump": {
                "100": {
                    "checksum": "sha256:058546072554365d8129445e8e0007ab568b596abbe785601fa66034c94819d3",
                    "enabled": 1
                }
            },
            "kdumpgui": {
                "100": {
                    "checksum": "sha256:f731b921881e2281da507bffc85f3a91b51923b42640c8cf3946758fe3c9b607",
                    "enabled": 1
                }
            },
            "keepalived": {
                "100": {
                    "checksum": "sha256:36ccc7f68a1adff12b67b6803a81932a870dd90c4eb2917b4a9c984f1b4cbd11",
                    "enabled": 1
                }
            },
            "kerberos": {
                "100": {
                    "checksum": "sha256:cef0f17dc19fba37aca82af7923dee5d1bb454d2b1ddf7255fd0edc50d4850df",
                    "enabled": 1
                }
            },
            "keyboardd": {
                "100": {
                    "checksum": "sha256:4eae5ab2b926695b3ed881a26e7b6a79f4dd48c1ffa3e3c6809cfa4958b54091",
                    "enabled": 1
                }
            },
            "keystone": {
                "100": {
                    "checksum": "sha256:6d9a8c7420b3aee2c752119ef6d72dc828cd76ea4f535bdcd44461a3cbb43454",
                    "enabled": 1
                }
            },
            "keyutils": {
                "100": {
                    "checksum": "sha256:4c36d2edd04848bff37b78ee9b6a1a3b0ae88a7e7434ceee07499941fd65fc44",
                    "enabled": 1
                }
            },
            "kismet": {
                "100": {
                    "checksum": "sha256:30ba7e91c6fe3cff8ac488e6160a5037fb8f1719275991ec44e5bfcbd9089aca",
                    "enabled": 1
                }
            },
            "kpatch": {
                "100": {
                    "checksum": "sha256:1a66f12e23daeda704408bfc0b55596c52dfd289c716a3d31e4ab1525bf9c961",
                    "enabled": 1
                }
            },
            "ksmtuned": {
                "100": {
                    "checksum": "sha256:0090de03760bfc344382161d6e20188431ef4cf3ac4f7c7d09fc8a6da0e194c5",
                    "enabled": 1
                }
            },
            "ktalk": {
                "100": {
                    "checksum": "sha256:3b11334ce4a06dc286895d270da44c399bc2f511ac810cce9de3def5d69fad91",
                    "enabled": 1
                }
            },
            "ktls": {
                "100": {
                    "checksum": "sha256:eede8f227c3c551c567b7fd8bc33017662d509251831aa3f4e600861473ac833",
                    "enabled": 1
                }
            },
            "l2tp": {
                "100": {
                    "checksum": "sha256:7e73fc9a1be125abe5dbab8143f1ea0c49c3ac5b4b2d54f585431b917f547e23",
                    "enabled": 1
                }
            },
            "ldap": {
                "100": {
                    "checksum": "sha256:73ef6bf64cbd0ffcf8dbf21dcefc95947d3bae1f09937cfeb31d1a7081775cdf",
                    "enabled": 1
                }
            },
            "libraries": {
                "100": {
                    "checksum": "sha256:eb10eb4c83992a6128376a978cb008c7abc2936626998abefd7fe8b00a4893f8",
                    "enabled": 1
                }
            },
            "likewise": {
                "100": {
                    "checksum": "sha256:43117a650ceba47fbef8424092eb1bd119cc248aa67514f1e51246ed3af07967",
                    "enabled": 1
                }
            },
            "lircd": {
                "100": {
                    "checksum": "sha256:bea08a06c64cf56277de0fea67675d1a69cdf34f37e1cc1c63f6b3c9c6a7edec",
                    "enabled": 1
                }
            },
            "livecd": {
                "100": {
                    "checksum": "sha256:9070ba1f03a04a5906bafb1399a0eb181dfdeab868dfb021505c116c61decbe3",
                    "enabled": 1
                }
            },
            "lldpad": {
                "100": {
                    "checksum": "sha256:df9b83c5a19829e187aa9460a6dfa2607b35cd454bc2200a65e548b1ccd0d5a8",
                    "enabled": 1
                }
            },
            "loadkeys": {
                "100": {
                    "checksum": "sha256:b7087172d332fe7a74fbaaedb9892ebf5a1c5eec52963a34bb92d73446566dea",
                    "enabled": 1
                }
            },
            "locallogin": {
                "100": {
                    "checksum": "sha256:c65f9e01840a5776e896f4505f356ffef8932e3cd5a2a09b42de4073b2b4d966",
                    "enabled": 1
                }
            },
            "lockdev": {
                "100": {
                    "checksum": "sha256:53e31c3a026736d7f73cedf1b0495a414fc9a1fb2e7fbcfc4fc851d98d9def5b",
                    "enabled": 1
                }
            },
            "logadm": {
                "100": {
                    "checksum": "sha256:8f962b8f2e2d555baaad73cb042106196d29805971e3351620c396ed3dd941db",
                    "enabled": 1
                }
            },
            "logging": {
                "100": {
                    "checksum": "sha256:88b317043685137a5fcf3bfd2b2552616761d16fd6f59da87047c8e8b5154e74",
                    "enabled": 1
                }
            },
            "logrotate": {
                "100": {
                    "checksum": "sha256:e6e0365560eeff3c2696fa4018548558151620ba597c67fd7e186f6880ae331d",
                    "enabled": 1
                }
            },
            "logwatch": {
                "100": {
                    "checksum": "sha256:92ce174b65baa4f1182d8edeaca01b39dcc0d0beecb71badce3bfb845dd525fb",
                    "enabled": 1
                }
            },
            "lpd": {
                "100": {
                    "checksum": "sha256:1d44ee585a65abe533b90aeb5b89bbe34217b7a0a7cc0feade0b8cb6386ea65e",
                    "enabled": 1
                }
            },
            "lsm": {
                "100": {
                    "checksum": "sha256:3979d77408c282fc3abfe32908df4889d7ff0ee8096ccb68a6a91d44bebfa56a",
                    "enabled": 1
                }
            },
            "lttng-tools": {
                "100": {
                    "checksum": "sha256:b216fc3f1025723b5bf96002b39dcb9ae8c80c6d6708dc2db348f419af025a45",
                    "enabled": 1
                }
            },
            "lvm": {
                "100": {
                    "checksum": "sha256:decf5c2093ef0a9220acda78d06c9ff44303da23730730d095929569c7166210",
                    "enabled": 1
                }
            },
            "mailman": {
                "100": {
                    "checksum": "sha256:0abc78cfd35d9e43c0c21983a250f874ee7ca7c10d7eb1c6e0d3106f538cf482",
                    "enabled": 1
                }
            },
            "mailscanner": {
                "100": {
                    "checksum": "sha256:eaf22ef2b2cc5858d9ff29e93eb84ea7a849e2e00d274f81c478ab12215b804c",
                    "enabled": 1
                }
            },
            "man2html": {
                "100": {
                    "checksum": "sha256:3269314b5d8bbfc843900344d65e604432222bebc7e2dd628abb9d186fca1cd1",
                    "enabled": 1
                }
            },
            "mandb": {
                "100": {
                    "checksum": "sha256:c5c6c2cfc4930bb865330ac4bf1e04976290772b0e058cce0d73de071ccf7819",
                    "enabled": 1
                }
            },
            "mcelog": {
                "100": {
                    "checksum": "sha256:b3412c39210371e395d6b4d88d4daf00b5699dc8dd50c435d1d69d91f062aad3",
                    "enabled": 1
                }
            },
            "mediawiki": {
                "100": {
                    "checksum": "sha256:4fb79885af1e12d162aaea04067c9dbd19c248b8bc6bb4fa91240acb6986c84c",
                    "enabled": 1
                }
            },
            "memcached": {
                "100": {
                    "checksum": "sha256:10e59adbf0210a83d2506cdcc189bed58fbb49f01ecc07afe00a5fc06a0f025c",
                    "enabled": 1
                }
            },
            "milter": {
                "100": {
                    "checksum": "sha256:5c74ad17eebd07a5efeda9e79bcbf4bbf4451146a6633739f83cb9e4d6c60802",
                    "enabled": 1
                }
            },
            "minidlna": {
                "100": {
                    "checksum": "sha256:3f6bca0a0db5cd51a82ae6c24e7fba8c326f31865c4b248a0e909469f06ae09f",
                    "enabled": 1
                }
            },
            "minissdpd": {
                "100": {
                    "checksum": "sha256:590bfd4dbff52f0ed68fe19c247e25d5299ac8448d2bd4ccc99084f85bdeb0ab",
                    "enabled": 1
                }
            },
            "miscfiles": {
                "100": {
                    "checksum": "sha256:4d0081333a7d5871e89e8955ab640c3374bf2bde197557cb6a83e122aee9137e",
                    "enabled": 1
                }
            },
            "mock": {
                "100": {
                    "checksum": "sha256:81d14741e1078a372f85f1cfa437ca54b0f0a97e28463ab4036e2791ba14368e",
                    "enabled": 1
                }
            },
            "modemmanager": {
                "100": {
                    "checksum": "sha256:f49d1a951d218cb0242a2a10b1f6d0b38d7180b50b57347d11b7c29c5863f733",
                    "enabled": 1
                }
            },
            "modutils": {
                "100": {
                    "checksum": "sha256:ae99eeccd230dbe62cc2d6b55bc87d272a3523c8284d884977fede6c9472220a",
                    "enabled": 1
                }
            },
            "mojomojo": {
                "100": {
                    "checksum": "sha256:ce4c112a710ad3b571ab733fbf785a6ac7b9eeb9c29f3ced5f994965e9386ab4",
                    "enabled": 1
                }
            },
            "mon_statd": {
                "100": {
                    "checksum": "sha256:b2a55057a35231d41eeb00c651b5a4e7af901a79090c8afeb44fad8b27928d2f",
                    "enabled": 1
                }
            },
            "mongodb": {
                "100": {
                    "checksum": "sha256:83b77a2ea78b70ab976e70aa831a72f890ae212a6ab100d66f6bcb29e1bac0c9",
                    "enabled": 1
                }
            },
            "motion": {
                "100": {
                    "checksum": "sha256:13c108e4971acbc506790783af8287ab766941ef3745594a69e020dfa769e75d",
                    "enabled": 1
                }
            },
            "mount": {
                "100": {
                    "checksum": "sha256:096e1169c82afdd0cadfbb7c70210e6aa1e8a7782aa0149a14b353ee3e20dc21",
                    "enabled": 1
                }
            },
            "mozilla": {
                "100": {
                    "checksum": "sha256:c8f4b2cd46adef88902943506de8358f988c8e867e681fb9029f7f8e4bcc3a2c",
                    "enabled": 1
                }
            },
            "mpd": {
                "100": {
                    "checksum": "sha256:2b0acf523137d68497ea06f19b5494cb5c7ad6e4102fd7c626b9b74fef062f19",
                    "enabled": 1
                }
            },
            "mplayer": {
                "100": {
                    "checksum": "sha256:b68a6f0cf3b61e296607fd8557480c3706ee0d211722c53708b8605d387bb434",
                    "enabled": 1
                }
            },
            "mptcpd": {
                "100": {
                    "checksum": "sha256:3d4452b8e5b08d56b16c141515f2169809ab160544da9164cd91c3872fa5ca59",
                    "enabled": 1
                }
            },
            "mrtg": {
                "100": {
                    "checksum": "sha256:6757ab3ee1b84340b5a58935d090b926ffc96f43d03c9cb243802b4d01d5a29b",
                    "enabled": 1
                }
            },
            "mta": {
                "100": {
                    "checksum": "sha256:02fd247f20410b0bf8b28860957fbf33ed26daac462d2de943ca3015e193efd4",
                    "enabled": 1
                }
            },
            "munin": {
                "100": {
                    "checksum": "sha256:879dcfa56b38100213e182a61d0692046377116a87f5892064768735ee9970e0",
                    "enabled": 1
                }
            },
            "mysql": {
                "100": {
                    "checksum": "sha256:bb8884e26ad17666037298c10b15dec713cf8c42ab522f7b25453150d9681833",
                    "enabled": 1
                }
            },
            "mythtv": {
                "100": {
                    "checksum": "sha256:f260b769c9d70fe26cddea75c71e1c3d16348233c40a3b8a844358d138c19d2f",
                    "enabled": 1
                }
            },
            "nagios": {
                "100": {
                    "checksum": "sha256:9b30f74c696be7ddfe60051035f3248e25577c1c82e6d0742ee3d2555d29f23a",
                    "enabled": 1
                }
            },
            "namespace": {
                "100": {
                    "checksum": "sha256:cc65424c4edcef752cf3d9223a0a49d84f7250bbc9c42d08d0b5727e0168dff6",
                    "enabled": 1
                }
            },
            "ncftool": {
                "100": {
                    "checksum": "sha256:26920eac2c6eb35ab68b04be278db9b2fe9bf461e2619dd3a28b79e9193f6c5c",
                    "enabled": 1
                }
            },
            "netlabel": {
                "100": {
                    "checksum": "sha256:1fd2a77b13726759bb892c0f152a0a9b56cc912d8ec8ae05d95618c0b7a157ed",
                    "enabled": 1
                }
            },
            "netutils": {
                "100": {
                    "checksum": "sha256:078e60b0fd4cf5cc4801e6145e7bd0ee07b0569fde1edbc5bff410282428e607",
                    "enabled": 1
                }
            },
            "networkmanager": {
                "100": {
                    "checksum": "sha256:1fa2a3044483bf0c0b6ab5c2af3b35297e18eb4328a75bc04c3151f5e31f42c1",
                    "enabled": 1
                }
            },
            "ninfod": {
                "100": {
                    "checksum": "sha256:ae4d23f2e9eb9580bd636cd7299d4d26649d59cc4e83e07f65505f315b4027b3",
                    "enabled": 1
                }
            },
            "nis": {
                "100": {
                    "checksum": "sha256:cd939a9294329e1d1f1a5be4e33cdaa4d89e4f78dde0a801c3b7d501323040fe",
                    "enabled": 1
                }
            },
            "nova": {
                "100": {
                    "checksum": "sha256:000c5853f5261b435c8a0362720ce04a1ed300aed5f22dfee9659e8a024466a0",
                    "enabled": 1
                }
            },
            "nscd": {
                "100": {
                    "checksum": "sha256:c5d30e10b3bbf87c64fe2afda70c803c7ffbb5f64b15f0f9894fa4b6eade41ad",
                    "enabled": 1
                }
            },
            "nsd": {
                "100": {
                    "checksum": "sha256:b3d665ce7b46965b31e35a99716b4cefcaffb36c93c178f773d291fe93f8af25",
                    "enabled": 1
                }
            },
            "nslcd": {
                "100": {
                    "checksum": "sha256:32091e44affc2766f06bf0f4c3187f27168b574c07b65c3c4767dc2a68dc75dd",
                    "enabled": 1
                }
            },
            "ntop": {
                "100": {
                    "checksum": "sha256:e628889b6330a5c4d4426b41eb5988e26bc9469cb66e01be6b4c6c000d694e2a",
                    "enabled": 1
                }
            },
            "ntp": {
                "100": {
                    "checksum": "sha256:be1c2f9a0a19271f6b88e6878a67f929ca000c952045302c886bbbafdb7c656e",
                    "enabled": 1
                }
            },
            "numad": {
                "100": {
                    "checksum": "sha256:662412fc8c21dd433e2f2275377df80c839b47fb097a88693722189d33fa0a8a",
                    "enabled": 1
                }
            },
            "nut": {
                "100": {
                    "checksum": "sha256:e54f8a0a0779de2cae093c087aa9fcbfc70333be012868b00ac0fe25e0a21646",
                    "enabled": 1
                }
            },
            "nvme_stas": {
                "100": {
                    "checksum": "sha256:4886f36ce881d3be5284a9dfd93874c800c965bb46146168e633b250dd9d0d2d",
                    "enabled": 1
                }
            },
            "nx": {
                "100": {
                    "checksum": "sha256:dd19058574329f7c7ce709bc94f6c4be87028cdd184cc365a61d5c0113b78bdf",
                    "enabled": 1
                }
            },
            "obex": {
                "100": {
                    "checksum": "sha256:649c497ab74a203064009d553b42829aac89ac5c4273b7c0ccc0a23530001fcc",
                    "enabled": 1
                }
            },
            "oddjob": {
                "100": {
                    "checksum": "sha256:da789cca84eb333331387826f63e8a0d41e0774f42f5f763c83de6451298354f",
                    "enabled": 1
                }
            },
            "opafm": {
                "100": {
                    "checksum": "sha256:06c9fb3964a855ec2ffd00719ed70b104d40e4b33720e2b109cd22ccc7157b8b",
                    "enabled": 1
                }
            },
            "opendnssec": {
                "100": {
                    "checksum": "sha256:a06e9a5b2b534d412b4783de05752f161c5c56a8948278cb1850f115494470c5",
                    "enabled": 1
                }
            },
            "openfortivpn": {
                "100": {
                    "checksum": "sha256:75fa83f68fa800bef996da027f14f6c8f0ded93f93569f3b559495dba39a6176",
                    "enabled": 1
                }
            },
            "openhpid": {
                "100": {
                    "checksum": "sha256:1e9015e8ebaa39a1d77e582423e4849371106eb1ad0fe9efca6a1dd46c69a7b7",
                    "enabled": 1
                }
            },
            "openshift": {
                "100": {
                    "checksum": "sha256:71e3093e4da6ab0f9866ccee84716cea1b7c0b7616a01bf14e8f45d855139cac",
                    "enabled": 1
                }
            },
            "openshift-origin": {
                "100": {
                    "checksum": "sha256:e8e6bb283142b08b3a1ce7c097f49f402bb6d59eb6d03138ef0b69f3579e466c",
                    "enabled": 1
                }
            },
            "opensm": {
                "100": {
                    "checksum": "sha256:c1bc06d4e5a22837586d3d7ee07922f26b4cd025687cdfd7e3e3789e1c5bdf3d",
                    "enabled": 1
                }
            },
            "openvpn": {
                "100": {
                    "checksum": "sha256:52f3581fc457419ac8fc34f647a7f609968619c59321b66d1c5dedb9a5075c82",
                    "enabled": 1
                }
            },
            "openvswitch": {
                "100": {
                    "checksum": "sha256:2b17c37a62e1a4a3deb1f56ce7fa846cfecf59f2b43c519f2380546418aca5fa",
                    "enabled": 1
                }
            },
            "openwsman": {
                "100": {
                    "checksum": "sha256:504b13310e16a22488e3bece96c6b2c8b2f1ab3b4ac0c75939ebd7d208e4019b",
                    "enabled": 1
                }
            },
            "oracleasm": {
                "100": {
                    "checksum": "sha256:ce88af5ce9ef09a298e9636753eb76703a1fba26e8831b3dffe07e00257fcd0a",
                    "enabled": 1
                }
            },
            "osad": {
                "100": {
                    "checksum": "sha256:c4e2d7ea074b383e96dee16c4be220d132301c03dcca3bd1698195f1d28c749b",
                    "enabled": 1
                }
            },
            "pads": {
                "100": {
                    "checksum": "sha256:23da314c2255bb1c19dad65a242681330d6bdddc2ba4a0b80ae2ad2249c0a52b",
                    "enabled": 1
                }
            },
            "passenger": {
                "100": {
                    "checksum": "sha256:75e644cc79f5447d17ed5461e52b37f29dc86806320b0ef562b2acd8e2870121",
                    "enabled": 1
                }
            },
            "pcm": {
                "100": {
                    "checksum": "sha256:ca821334ad63d1810d993cdd416072a93f57f17b8876a151b738ee992c59ccbd",
                    "enabled": 1
                }
            },
            "pcmcia": {
                "100": {
                    "checksum": "sha256:819812bc0e7f5c0dadc824e330aeb912ed29face4a904beb7bdc8830dc0a49a5",
                    "enabled": 1
                }
            },
            "pcp": {
                "100": {
                    "checksum": "sha256:bb61951b39b8975478348185eb3050c815930726687b2a3a4c0ac4427ce614ac",
                    "enabled": 1
                }
            },
            "pcscd": {
                "100": {
                    "checksum": "sha256:b3393bddbd1de42df51c9d3483169c2fe89f167761b0fd3829ecf277b6c7c3d6",
                    "enabled": 1
                }
            },
            "pdns": {
                "100": {
                    "checksum": "sha256:846907e70298de291c325ea43e4215314c08433a5b70111b90f7a3eecf7f3e83",
                    "enabled": 1
                }
            },
            "pegasus": {
                "100": {
                    "checksum": "sha256:51cdabfd0ee2f5b3df09cb7a8132dc0d2f7953e6c00782f87d95779cc29e02bd",
                    "enabled": 1
                }
            },
            "permissivedomains": {
                "100": {
                    "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2",
                    "enabled": 1
                }
            },
            "pesign": {
                "100": {
                    "checksum": "sha256:597ab5f0f472ea2f2aca1b3754c2457cc7769f79daaec0856ad2675e53867b80",
                    "enabled": 1
                }
            },
            "pingd": {
                "100": {
                    "checksum": "sha256:7a7013be4a046d0ba770988e530fba59b3ce8c8159780af1a5001e5fad8ba042",
                    "enabled": 1
                }
            },
            "pkcs": {
                "100": {
                    "checksum": "sha256:ce51587d33ffe41699200700a15f92892a181c7e52aa451568a740dffd6eaeab",
                    "enabled": 1
                }
            },
            "pki": {
                "100": {
                    "checksum": "sha256:611152fa77dc465824018a6b6e2687f2a1e483f92e20d9754c33f8032d7a210d",
                    "enabled": 1
                }
            },
            "plymouthd": {
                "100": {
                    "checksum": "sha256:58fd323d88ba1b9f753102be767c848248d30c65a83fb30f2e13ba0073f10ab5",
                    "enabled": 1
                }
            },
            "podsleuth": {
                "100": {
                    "checksum": "sha256:ac303c7ac224ff7fec004f7db6bdc6cabc5e9fbf1df3e27ab454f3f87de18b8d",
                    "enabled": 1
                }
            },
            "policykit": {
                "100": {
                    "checksum": "sha256:10852eded77ba66158385a63efa346a0cab9305fab8710f7992b951c671569be",
                    "enabled": 1
                }
            },
            "polipo": {
                "100": {
                    "checksum": "sha256:e30571ba606507dd4439ea7b455a115170923229f105614f0b044ed07a8ba832",
                    "enabled": 1
                }
            },
            "portmap": {
                "100": {
                    "checksum": "sha256:26185c5731c3401ab5665cbfefa89268e91f21bb8f0fd88de3602d013ab6f00f",
                    "enabled": 1
                }
            },
            "portreserve": {
                "100": {
                    "checksum": "sha256:0caead9c9effa3c022f6e4740ac907788a51468996fb369574b04a50dd29eb59",
                    "enabled": 1
                }
            },
            "postfix": {
                "100": {
                    "checksum": "sha256:011e57b206775738cc6fb16e48e9ed222ed67928857cc0e0d19f02e039757e33",
                    "enabled": 1
                }
            },
            "postgresql": {
                "100": {
                    "checksum": "sha256:dd827cd694ba2389e77fee7137a741677590dc56dff0015fa6b58b93ab354c7c",
                    "enabled": 1
                }
            },
            "postgrey": {
                "100": {
                    "checksum": "sha256:1bc24bad910fe07f8737b25896d73d035cd83603897f3ae236bac28a95e49189",
                    "enabled": 1
                }
            },
            "powerprofiles": {
                "100": {
                    "checksum": "sha256:8b4e744993aa801db3b0e1f629a104e1ef934b6597d2789f8ec15ecba467001a",
                    "enabled": 1
                }
            },
            "ppp": {
                "100": {
                    "checksum": "sha256:ffb0f1240b1f95504b83f86a52dcd562df79cf9745b52a859db025821501ba83",
                    "enabled": 1
                }
            },
            "prelink": {
                "100": {
                    "checksum": "sha256:eda9cb2209b0e28b17127136f79559febd986fbc4e0180751eb160fd72fd8310",
                    "enabled": 1
                }
            },
            "prelude": {
                "100": {
                    "checksum": "sha256:6e31f7b2379cc4c461bdc1a826cbe607b72b997960c8304ccfa6256bea0b3501",
                    "enabled": 1
                }
            },
            "privoxy": {
                "100": {
                    "checksum": "sha256:8ac7f57a98159bee255928841806174b9c3fd760f74b2e623f5622ccfb049c11",
                    "enabled": 1
                }
            },
            "procmail": {
                "100": {
                    "checksum": "sha256:0c7b790422f9c4e5d531e8e1de2cd5f8b128223e2482cdf5695394a1ba833e1c",
                    "enabled": 1
                }
            },
            "prosody": {
                "100": {
                    "checksum": "sha256:360e32fc4cb037faf2b5684ce2d376c42cff14785fc95d82fd750fe66dabe1e2",
                    "enabled": 1
                }
            },
            "psad": {
                "100": {
                    "checksum": "sha256:17e628d6a7692372580df6800a972170a1c36c975fab437e559c3afdbfc0c4ee",
                    "enabled": 1
                }
            },
            "ptchown": {
                "100": {
                    "checksum": "sha256:861c6b803141841f4756be8531775c7d37e8ec378b2b9e90f37e1932d35e5e36",
                    "enabled": 1
                }
            },
            "pulseaudio": {
                "100": {
                    "checksum": "sha256:02a332ab93358096dfeafa4b77d9ca1c8dcf79fe6ab9b18150646f1a2efee4dc",
                    "enabled": 1
                }
            },
            "puppet": {
                "100": {
                    "checksum": "sha256:d364337a160a2218afdc92eab0cd27e057543614dfcf96f2d74d51283cda1589",
                    "enabled": 1
                }
            },
            "pwauth": {
                "100": {
                    "checksum": "sha256:4cabb0c5c75a395579d7523d3e7616db9fb0e1f40d3b9f581f6e94eebb049810",
                    "enabled": 1
                }
            },
            "qatlib": {
                "100": {
                    "checksum": "sha256:6cf80a3a427ac287351081934516c8e300fa9929703b290df47ce87df99ec9de",
                    "enabled": 1
                }
            },
            "qmail": {
                "100": {
                    "checksum": "sha256:7972bb152e68b8fdb1a77c84868b66e420365c9f526254fb272a7263500acbff",
                    "enabled": 1
                }
            },
            "qpid": {
                "100": {
                    "checksum": "sha256:001454c6d18d7f75e02524e3559c91264b4711cff8c4417ea2580b1c3e0a8576",
                    "enabled": 1
                }
            },
            "quantum": {
                "100": {
                    "checksum": "sha256:a25e72eac82204e6200ec843bc06d7b59a1ce7c755666bf1ed9effd12ee466d7",
                    "enabled": 1
                }
            },
            "quota": {
                "100": {
                    "checksum": "sha256:68aca8ea777ce2a13e5d8c03b95805f3e77a4f963cbc1d960fd0162883b6083f",
                    "enabled": 1
                }
            },
            "rabbitmq": {
                "100": {
                    "checksum": "sha256:6ae09222a0b9aefa3ead90b1f1ee08972570b1f5e39d25c05108228e4d315d50",
                    "enabled": 1
                }
            },
            "radius": {
                "100": {
                    "checksum": "sha256:492c565eb90097f567381751dff3cdf0ae9a6db7f298dd13cd04bd67522789f6",
                    "enabled": 1
                }
            },
            "radvd": {
                "100": {
                    "checksum": "sha256:36cad8d8ea113dd1a761b8b386bd0a057883acc65b2c60140c2c001fc04e2e43",
                    "enabled": 1
                }
            },
            "raid": {
                "100": {
                    "checksum": "sha256:a0f7c18b6066e9bbf2454c92917e0abeff542fc804a524a95a02583b0ca22bb1",
                    "enabled": 1
                }
            },
            "rasdaemon": {
                "100": {
                    "checksum": "sha256:a966760e6a561f6eb4789d36b88bb1879c965b851e02d834c3304f0d2987461d",
                    "enabled": 1
                }
            },
            "rdisc": {
                "100": {
                    "checksum": "sha256:36bafd2cac1aeb59eb2e2a4a4a4f9d27b1594a92df1727a8977c83dca6292400",
                    "enabled": 1
                }
            },
            "readahead": {
                "100": {
                    "checksum": "sha256:ec74b1ede394f0c1ab75a5439b4744a261b658edc17d5a0353ca273e397d2156",
                    "enabled": 1
                }
            },
            "realmd": {
                "100": {
                    "checksum": "sha256:4d1f0907111fb4fcc242d7d91574e273650c86d2207f870e4c47036d900e4d80",
                    "enabled": 1
                }
            },
            "redis": {
                "100": {
                    "checksum": "sha256:dc0f28028ab595f363bf52bd9beda91f67d0639e65fb767431f8675371aa4415",
                    "enabled": 1
                }
            },
            "remotelogin": {
                "100": {
                    "checksum": "sha256:31f6f5efb0759335de46ac3ab4c8a64955f838afc9743a20f2e81a8cb54cb36a",
                    "enabled": 1
                }
            },
            "restraint": {
                "400": {
                    "checksum": "sha256:79ddcfa70f63175b01a14ce2e66542c5d30d799f492f0ef148fa439a675eaeb9",
                    "enabled": 1
                }
            },
            "rhcd": {
                "100": {
                    "checksum": "sha256:de093c51d11ab2de52b41d550d06ce7def91a9e8c0f507884086d4d34343ae40",
                    "enabled": 1
                }
            },
            "rhcs": {
                "100": {
                    "checksum": "sha256:fdab19b51617ba1dfc3a402dbc261452cb23c74be402208563608610e6a6111a",
                    "enabled": 1
                }
            },
            "rhgb": {
                "100": {
                    "checksum": "sha256:4b222d1ae2a6931560eca08e087c56c7835ce79c3f82514c5c6d3e98a7de89b1",
                    "enabled": 1
                }
            },
            "rhnsd": {
                "100": {
                    "checksum": "sha256:ccef03c795df14786a6d323f735e2ec58f9270805e7de9fe924549c98b24e6f4",
                    "enabled": 1
                }
            },
            "rhsmcertd": {
                "100": {
                    "checksum": "sha256:63eb81ab3a26272e180e1b64f7d2d28e65ed7fd3a2e261dbba40c0fd4d82ecfa",
                    "enabled": 1
                }
            },
            "rhts": {
                "400": {
                    "checksum": "sha256:e92c439347ea9241cc5bce5f27844b9356dd7502bec0b93cb503aeedfeea5cbf",
                    "enabled": 1
                }
            },
            "ricci": {
                "100": {
                    "checksum": "sha256:ce73d254e6271e7e012d38da6ff05b7c66c215b8ea5b99161ab1e97c77dfc31a",
                    "enabled": 1
                }
            },
            "rkhunter": {
                "100": {
                    "checksum": "sha256:5f31c86e2f2cc425040785cc22a9040c7cdb80bd1145dfd668d2b2597534d6e3",
                    "enabled": 1
                }
            },
            "rlogin": {
                "100": {
                    "checksum": "sha256:819c365b1e88bdb1eb171c48fbcc2b2e9c2ea405d7c20024919aa7d8bcb28de4",
                    "enabled": 1
                }
            },
            "rngd": {
                "100": {
                    "checksum": "sha256:0ea0c16871f23b263aaaee590ecbad1d9095d01c7b7b6cd11a80278d1257957f",
                    "enabled": 1
                }
            },
            "roundup": {
                "100": {
                    "checksum": "sha256:931c928c6bf89625bce5b6cb76084615486f91d6e86a09fcaf007c47c5e68ba7",
                    "enabled": 1
                }
            },
            "rpc": {
                "100": {
                    "checksum": "sha256:6f9c4e389ebc07f95aea12e6f307f298835df92edeefd85d152db11600f22620",
                    "enabled": 1
                }
            },
            "rpcbind": {
                "100": {
                    "checksum": "sha256:6f9c39f5bd3e6b092668476aa1233130cc9b9edd04c66af9d667dbf9d5f48bc6",
                    "enabled": 1
                }
            },
            "rpm": {
                "100": {
                    "checksum": "sha256:416b5e6a242327091bb9fa10b833665b68fe9ab80dc8cc494bf3726cfac0eeb7",
                    "enabled": 1
                }
            },
            "rrdcached": {
                "100": {
                    "checksum": "sha256:340e748f2da66a79889ffe2d77224bce0090a2954743d5c1098ac3cb1eb48866",
                    "enabled": 1
                }
            },
            "rshd": {
                "100": {
                    "checksum": "sha256:3cb2ccaf8367e6a99384582a9c902e2af4480b04d8bc56ab5562c7d3111cd323",
                    "enabled": 1
                }
            },
            "rshim": {
                "100": {
                    "checksum": "sha256:190e4586cea0d0894209365e8a71c0b4a6467e83bebc8bac649644695f209ac8",
                    "enabled": 1
                }
            },
            "rssh": {
                "100": {
                    "checksum": "sha256:26f6c19589d58fd23c303ac699697517d6883a9531837ad406e2f09b7507278d",
                    "enabled": 1
                }
            },
            "rsync": {
                "100": {
                    "checksum": "sha256:0aa06de248b996ddd0afc67811e82a96bde2dc7a2c328ecbbf6c6a5c9c780784",
                    "enabled": 1
                }
            },
            "rtas": {
                "100": {
                    "checksum": "sha256:06ca74bda1764a6f8a241b370f13bdf438674dac0dfbc30f98facc0884b190ea",
                    "enabled": 1
                }
            },
            "rtkit": {
                "100": {
                    "checksum": "sha256:d8e666993d2c3c43a5efc6628d04fed230f380cec2feafc9fb1eb305239ad954",
                    "enabled": 1
                }
            },
            "rwho": {
                "100": {
                    "checksum": "sha256:8c99bd9b6a8fc9a4dd9e34af66adfe117bdb4b2e88a7f1f2e6a319db450168e8",
                    "enabled": 1
                }
            },
            "samba": {
                "100": {
                    "checksum": "sha256:163c4b641e4392ea1a68a9dd74bad9c8cddb9874be030b007f995c79969b0024",
                    "enabled": 1
                }
            },
            "sambagui": {
                "100": {
                    "checksum": "sha256:81bbb3bc281f6887174beeba44d970d3edc40d3fbad85ed8b9d03528800cb38a",
                    "enabled": 1
                }
            },
            "sandboxX": {
                "100": {
                    "checksum": "sha256:73a9063f298fd65b376ac709fb599ef1eefccb39b09360c412946fa507cf53ba",
                    "enabled": 1
                }
            },
            "sanlock": {
                "100": {
                    "checksum": "sha256:20c66e776cec92678d49c6e41eb804981e3769b8b595c5c7a8a1f94bf8f5a167",
                    "enabled": 1
                }
            },
            "sap": {
                "100": {
                    "checksum": "sha256:9886dbb3eebb514446605e62fcc6969a7d5a56f1f3b84129e7b46e20ab9de366",
                    "enabled": 1
                }
            },
            "sasl": {
                "100": {
                    "checksum": "sha256:cbd713a58cf9ff07435b0a6210df156c4db5e4435b3d51fdf60627877e42e961",
                    "enabled": 1
                }
            },
            "sbd": {
                "100": {
                    "checksum": "sha256:30936f11af78b1ed885d6deaa16025623fc794c54e452fc02c056e388dc263ce",
                    "enabled": 1
                }
            },
            "sblim": {
                "100": {
                    "checksum": "sha256:6bee6346bb885fc64e8579218dda7100aac5a61d6ad1e474cae7bcce8448dd1a",
                    "enabled": 1
                }
            },
            "screen": {
                "100": {
                    "checksum": "sha256:93aae106e0590c3b6ce75475c86101e20b244878b7f659dc9a9638529282bffb",
                    "enabled": 1
                }
            },
            "secadm": {
                "100": {
                    "checksum": "sha256:e69f3497fc89f1f96a3ecff77a12119b6abb012d4fa5973b9f33eeadbd12319b",
                    "enabled": 1
                }
            },
            "sectoolm": {
                "100": {
                    "checksum": "sha256:35e4d2a3208b8c0d74c4016309c3447efac46618ce4209c78af9861f95cf36c6",
                    "enabled": 1
                }
            },
            "selinuxutil": {
                "100": {
                    "checksum": "sha256:56c793ddc8f620fec42c9bc28c618e928882542b0de95adda2c8918742f890b2",
                    "enabled": 1
                }
            },
            "sendmail": {
                "100": {
                    "checksum": "sha256:e04e7d214022fd792eeb66b3426e0eef2dc6018b06005497b849bd2b5229ee81",
                    "enabled": 1
                }
            },
            "sensord": {
                "100": {
                    "checksum": "sha256:49a17300f038a4023d478e0031f9a6ef18cf6817e696abed1b48983ef75366bf",
                    "enabled": 1
                }
            },
            "setrans": {
                "100": {
                    "checksum": "sha256:df3f3357b0e78a2e2b110ab6e9dc9907d83d296e37bfd8c493b0ddf11a8d4e75",
                    "enabled": 1
                }
            },
            "setroubleshoot": {
                "100": {
                    "checksum": "sha256:e10d1bfe87e505f003dcadb0f5f384e7db1bc6da48cd705a78c0630ef0a77207",
                    "enabled": 1
                }
            },
            "seunshare": {
                "100": {
                    "checksum": "sha256:f762b9090a0dc16cdb04e108f47a696170cebc74d489930915380689ade87d74",
                    "enabled": 1
                }
            },
            "shorewall": {
                "100": {
                    "checksum": "sha256:a55e30390b7305a2e01d2e4be3786edc8c763f43c249ae07889c82c84cc7b9f3",
                    "enabled": 1
                }
            },
            "slocate": {
                "100": {
                    "checksum": "sha256:962e24ad1b72e7fc878a7ed7a46c5c72b3db9d4cc13aac4ea1f6c12b2ce22700",
                    "enabled": 1
                }
            },
            "slpd": {
                "100": {
                    "checksum": "sha256:21194bcdd2babe7aeaac215e7993d75f23bb0ed09e6378d222461113221cb7cb",
                    "enabled": 1
                }
            },
            "smartmon": {
                "100": {
                    "checksum": "sha256:a705982432ea6ff26451fdf015e5247103ca90e8f1350357ea956812f60d55d0",
                    "enabled": 1
                }
            },
            "smokeping": {
                "100": {
                    "checksum": "sha256:71a49275d91a3678a30ee048c48f3fe3222b116d94d9a1f5eafe2e6962e0cbcf",
                    "enabled": 1
                }
            },
            "smoltclient": {
                "100": {
                    "checksum": "sha256:95c7ee276c5baae8d1a63c94a349ea997f421cebdf73d013ab4b90b34d986fd0",
                    "enabled": 1
                }
            },
            "snapper": {
                "100": {
                    "checksum": "sha256:babce05f61b6663c8c86269b7211318299807d739d246579ead6e6569dbea4aa",
                    "enabled": 1
                }
            },
            "snmp": {
                "100": {
                    "checksum": "sha256:5bfb4dae5517d7315d7e1ba509c0935ac06bfc25f70dfde91f31e44fc4d3528c",
                    "enabled": 1
                }
            },
            "snort": {
                "100": {
                    "checksum": "sha256:4ad1c6753ac53dddc1a2f618fb2eda212e114a474a31cf98840c0048e36514ce",
                    "enabled": 1
                }
            },
            "sosreport": {
                "100": {
                    "checksum": "sha256:55af58727c8e1a2d5ffc0eeebfabac14030d6594aaf36eae10c51d3f43fd7c94",
                    "enabled": 1
                }
            },
            "soundserver": {
                "100": {
                    "checksum": "sha256:725e2158052da4d2b9499d63570437f35d317f541e6fd0a7b6841f88be179689",
                    "enabled": 1
                }
            },
            "spamassassin": {
                "100": {
                    "checksum": "sha256:e653084023932281ca276c722b79aaecbc5d81e97384373a73a4de2f4d3da9f9",
                    "enabled": 1
                }
            },
            "speech-dispatcher": {
                "100": {
                    "checksum": "sha256:b08147b36ed3fa54428c34c8ac8e2781717ecb453c2372760d41a7738b7757da",
                    "enabled": 1
                }
            },
            "squid": {
                "100": {
                    "checksum": "sha256:326995099022aae4699bd16a731b01145d48811e2be43eb3ac04d688f2350707",
                    "enabled": 1
                }
            },
            "ssh": {
                "100": {
                    "checksum": "sha256:2e60fd12e5d79df288e99630ef5730a06addb7364f562a1b2d9a99907c3e1729",
                    "enabled": 1
                }
            },
            "sslh": {
                "100": {
                    "checksum": "sha256:cdacc7306b49ca27f4eed4dda7332bca872cf309459831c96694293721ed74d3",
                    "enabled": 1
                }
            },
            "sssd": {
                "100": {
                    "checksum": "sha256:90e7ed3384bdd4a77b0b3c9d4a6cef0e2bdb8efbf9be9f75ddf3fdb3383e92e8",
                    "enabled": 1
                }
            },
            "staff": {
                "100": {
                    "checksum": "sha256:dac72e008d8b6f68d1d9b5d417687a655dfb7d410378c5a3acacc391ec6061ba",
                    "enabled": 1
                }
            },
            "stalld": {
                "100": {
                    "checksum": "sha256:9ef028437851996db7b3152ef672f991fde6cd464194eef73c1670c33cdb274f",
                    "enabled": 1
                }
            },
            "stapserver": {
                "100": {
                    "checksum": "sha256:0a08f155a5545909cceeb2c2221dcee1980385b52a4afd3f8b8f6704617d14a5",
                    "enabled": 1
                }
            },
            "stratisd": {
                "100": {
                    "checksum": "sha256:46468db34c31c668f4b213b8ed14fcfb53e1e183431f6237364acf686d83b8f0",
                    "enabled": 1
                }
            },
            "stunnel": {
                "100": {
                    "checksum": "sha256:9428aee5be7ef002eba8ade18418e1b436e2f8ed0479182b47822318c4571923",
                    "enabled": 1
                }
            },
            "su": {
                "100": {
                    "checksum": "sha256:71748ab8b3ac17e7f16325adc3575242076dc6f80ea6797091fb5ec54bdb4a49",
                    "enabled": 1
                }
            },
            "sudo": {
                "100": {
                    "checksum": "sha256:c274be08afec52e985bfc508199ef983f1f2eab41bf9b72b0921aa0276e47a51",
                    "enabled": 1
                }
            },
            "svnserve": {
                "100": {
                    "checksum": "sha256:2a78595b73c7ea25c5b395ec91f18b3dad58002dd8ef3652d69edd5a8c13f3f5",
                    "enabled": 1
                }
            },
            "swift": {
                "100": {
                    "checksum": "sha256:ccacc18e643d0ca081b36d910abb0ad6fae2acdd1f92a52b4fc9004fc31f4677",
                    "enabled": 1
                }
            },
            "sysadm": {
                "100": {
                    "checksum": "sha256:45baef440c9703c9cea98a50b924693afd2e182705f10ec9be45fbf7ab9dde19",
                    "enabled": 1
                }
            },
            "sysadm_secadm": {
                "100": {
                    "checksum": "sha256:436746a23ca13b58adc9e2a051b59463b8398cecb114edd411fcde4fce182148",
                    "enabled": 1
                }
            },
            "sysnetwork": {
                "100": {
                    "checksum": "sha256:a9ec2dd40baab31ae84049839e2043c24a475f6d9f1daf06f1c2919a4f01eae3",
                    "enabled": 1
                }
            },
            "sysstat": {
                "100": {
                    "checksum": "sha256:e35d3df921d581298273023c20b12e4d1168c249b06e08458a27732a4ff082ce",
                    "enabled": 1
                }
            },
            "systemd": {
                "100": {
                    "checksum": "sha256:e4b3f11e361e4faa3edeac924d2ed2ee41ceb3ea33e93c7ac5337b493a459bc7",
                    "enabled": 1
                }
            },
            "systemd-homed": {
                "100": {
                    "checksum": "sha256:aa66cb9052b66f23139475f26f496171f106858f85787fe08d2de8a53129d0ff",
                    "enabled": 1
                }
            },
            "tangd": {
                "100": {
                    "checksum": "sha256:b09965ae1db2d4aeab2d1c8775897288e6d55224183205ed48002daa816d2bbd",
                    "enabled": 1
                }
            },
            "targetd": {
                "100": {
                    "checksum": "sha256:8d31e97d05e23cbca57938b58d725c33af5e6aaa3e563971a398936cd3a21b36",
                    "enabled": 1
                }
            },
            "tcpd": {
                "100": {
                    "checksum": "sha256:118bab50d3722356fda7ca0983781aecd45f2a0979bc56617be3a8e044663451",
                    "enabled": 1
                }
            },
            "tcsd": {
                "100": {
                    "checksum": "sha256:e116438dee0dab9b191c25cfcb43a7dc0c8ac18bdb920e0596267365784b43c7",
                    "enabled": 1
                }
            },
            "telepathy": {
                "100": {
                    "checksum": "sha256:98fac790af3d7a87e75899e112ba5d4cd2455261e44b60f1a0d7387ba0e0ad49",
                    "enabled": 1
                }
            },
            "telnet": {
                "100": {
                    "checksum": "sha256:3bcf922688dc95cd19df39b5b48017dc48fca501bb70a953f99abce2c6fe7345",
                    "enabled": 1
                }
            },
            "tftp": {
                "100": {
                    "checksum": "sha256:368e98dbdf56054db821272c38ae2aa3a92d88b419ebf886cfbf33db1cc4177d",
                    "enabled": 1
                }
            },
            "tgtd": {
                "100": {
                    "checksum": "sha256:c07be819f426ec0bbba6ba8f8f1a0a57d6a3e02064bde55d0243cf21d24dce13",
                    "enabled": 1
                }
            },
            "thin": {
                "100": {
                    "checksum": "sha256:9825f5ea5ecf0720ae08c5fb7a50d3318b3dfb520801cf5ec8c0663364df5a62",
                    "enabled": 1
                }
            },
            "thumb": {
                "100": {
                    "checksum": "sha256:257fdf568345e137e4823a8151907144da46019ac547eec7de29c052e8b23a8a",
                    "enabled": 1
                }
            },
            "tlp": {
                "100": {
                    "checksum": "sha256:93f4e9c278aa30c7ce490f4b9c1d6238ab0d16f662fc880049398d241e429c47",
                    "enabled": 1
                }
            },
            "tmpreaper": {
                "100": {
                    "checksum": "sha256:83f45391b2a56e868ed635df8adfd7a461c853d99c92cb0839d24d560652accb",
                    "enabled": 1
                }
            },
            "tomcat": {
                "100": {
                    "checksum": "sha256:4b9b1022c21e3cc4b90416961b2e5990838532561080f53a962b14cf1101012e",
                    "enabled": 1
                }
            },
            "tor": {
                "100": {
                    "checksum": "sha256:5019fa69d2c211fff26780ddad52079881cffe4688af6407b7ee03ff499339d9",
                    "enabled": 1
                }
            },
            "tuned": {
                "100": {
                    "checksum": "sha256:58e9695125a89de4faee0ac5a37a1a26067c97b6679d8350de744bd5d21257c8",
                    "enabled": 1
                }
            },
            "tvtime": {
                "100": {
                    "checksum": "sha256:2d5107d1b764ddd411e4abf44d7b3a2770c192bf0d0e19c88856d6593f0f2891",
                    "enabled": 1
                }
            },
            "udev": {
                "100": {
                    "checksum": "sha256:494dde6bce44e76f5a7f70f98e178e7c7e0d195dae293fa631e97c5d5be7ce75",
                    "enabled": 1
                }
            },
            "ulogd": {
                "100": {
                    "checksum": "sha256:bb5057eabb1e1e690c8c4e2a19c5014a00240538d21fcd3b835b7a9937dcb469",
                    "enabled": 1
                }
            },
            "uml": {
                "100": {
                    "checksum": "sha256:c05987c6a9f49b3370c011d79431dc52d6d435e89a577d7d10a8db02587a3786",
                    "enabled": 1
                }
            },
            "unconfined": {
                "100": {
                    "checksum": "sha256:af677713dbda91ea9db3e438de628e08ceb5e9c18b2fb5e338b9b81f79f5fa71",
                    "enabled": 1
                }
            },
            "unconfineduser": {
                "100": {
                    "checksum": "sha256:1b350a7c7089cab7a19adba6f1d357711f41c8dc1fa015676c64279b89b700cd",
                    "enabled": 1
                }
            },
            "unlabelednet": {
                "100": {
                    "checksum": "sha256:1b37d8de7cf505da7e184ad33a2d04904d9fc51d64707b5d7e449b9d615deaa2",
                    "enabled": 1
                }
            },
            "unprivuser": {
                "100": {
                    "checksum": "sha256:51944913c44942cbf3cac0e3c28c845a18a5d8e978ef82551de32f75dc29b045",
                    "enabled": 1
                }
            },
            "updfstab": {
                "100": {
                    "checksum": "sha256:d86b9c14ea814e61185eb2fd53bdda95933b44eebf98ba942cbae61b3b16ce2b",
                    "enabled": 1
                }
            },
            "usbmodules": {
                "100": {
                    "checksum": "sha256:db8f8c1a3ad46b9b9fbfb777e047e3eb0ca6e1e4be62c1cb16292b934d218b4a",
                    "enabled": 1
                }
            },
            "usbmuxd": {
                "100": {
                    "checksum": "sha256:ba9efc2628a81ac2c95346cb46f79b57748b56575197830bdc0fe9f1af6e6d07",
                    "enabled": 1
                }
            },
            "userdomain": {
                "100": {
                    "checksum": "sha256:5e67ab1399f57e7c3f01a4d83877bc1d33cfc458959fd696457736fd7da65e3b",
                    "enabled": 1
                }
            },
            "userhelper": {
                "100": {
                    "checksum": "sha256:040a092ace1fddcfc244268cf1a30bb7935fcb1b493db9b22a3e4919ace8c00f",
                    "enabled": 1
                }
            },
            "usermanage": {
                "100": {
                    "checksum": "sha256:6b4b9f3c433d31a1416ed75a584f90616aa6b335297d5ef845ff3f71d36c3cfe",
                    "enabled": 1
                }
            },
            "usernetctl": {
                "100": {
                    "checksum": "sha256:b50724f82867d86cb5ed896fed12ac8a5d939990f9e92221b282d63d44c13fa9",
                    "enabled": 1
                }
            },
            "uucp": {
                "100": {
                    "checksum": "sha256:b51b9edc8ba92cbdbbac99877b4388d9397976992ba7f5d4c467627763445ea4",
                    "enabled": 1
                }
            },
            "uuidd": {
                "100": {
                    "checksum": "sha256:4e1a2aeb4ae247cde28cc50db01cc6043bb78c45763eb8160a957be55a32a2d2",
                    "enabled": 1
                }
            },
            "varnishd": {
                "100": {
                    "checksum": "sha256:b9162e79ac68e0ab17b12ffb6d5c3d9e095d0b2b81ffb8c70b3ebd6531b32b0e",
                    "enabled": 1
                }
            },
            "vdagent": {
                "100": {
                    "checksum": "sha256:af8d12d85265012b374756768440f273481308b2e90fbba97b8afecb9bd2574d",
                    "enabled": 1
                }
            },
            "vhostmd": {
                "100": {
                    "checksum": "sha256:97d72614bfbdb0146d8f7c90b90e9549ab1d202ffc55066ce79464e4a6a20cdf",
                    "enabled": 1
                }
            },
            "virt": {
                "100": {
                    "checksum": "sha256:36807d88383d73cb0f627bc0a6ccfaaf79e989659fec8a2adabfc95d6a18a988",
                    "enabled": 1
                }
            },
            "virt_supplementary": {
                "100": {
                    "checksum": "sha256:738a6f720645dec9393904cc8f26bc66447c7451862d73f8d40ce74b1887e9b7",
                    "enabled": 1
                }
            },
            "vlock": {
                "100": {
                    "checksum": "sha256:a01e7aa92da5f24789286ac6530246cec5ed9c8675493cef2b06a7a7be595958",
                    "enabled": 1
                }
            },
            "vmtools": {
                "100": {
                    "checksum": "sha256:e8bac770b13be7d27c9b9286effe5e50e330e9ab1a225e06890edbf33798bf89",
                    "enabled": 1
                }
            },
            "vmware": {
                "100": {
                    "checksum": "sha256:00fa1441d6e08f5af70bcea53ebf1f3e4c12a9737f1ef1e74fd5b79957d7f042",
                    "enabled": 1
                }
            },
            "vnstatd": {
                "100": {
                    "checksum": "sha256:2b80169a6f39f4ebbc1c2d9b0211eec558da2a8eaa6bef904b9fc66eda7122dc",
                    "enabled": 1
                }
            },
            "vpn": {
                "100": {
                    "checksum": "sha256:4c7963f955ff610de43933a480c47fd50da6b4d88f03ed6bbf4663dec25c3cae",
                    "enabled": 1
                }
            },
            "w3c": {
                "100": {
                    "checksum": "sha256:6b19f10f4a51f6f1f343b082d4a96c3335b191f00d41152ae090627f727b8360",
                    "enabled": 1
                }
            },
            "watchdog": {
                "100": {
                    "checksum": "sha256:4299fadc4f28a9e6ee9b9e5a7688955752f1bf36ef83656fee6403bbfe10e0b0",
                    "enabled": 1
                }
            },
            "wdmd": {
                "100": {
                    "checksum": "sha256:c3973da0b3e1836af273137bc8ed86a443da6a73d22255aa1135582923347b2c",
                    "enabled": 1
                }
            },
            "webadm": {
                "100": {
                    "checksum": "sha256:8f262e982526c5e63cc93fe9806380aef291c0fa06d8c6468c1df126325ae83c",
                    "enabled": 1
                }
            },
            "webalizer": {
                "100": {
                    "checksum": "sha256:43f69260eb8c399ceb227825b190ee1758a60b1b78052b1fbfe21d5fce8daab5",
                    "enabled": 1
                }
            },
            "wine": {
                "100": {
                    "checksum": "sha256:204369c49480adaf6c3bbee72b72ec17a5fac47821e371ac9b0e633666af050e",
                    "enabled": 1
                }
            },
            "wireguard": {
                "100": {
                    "checksum": "sha256:7ce468b04b03ed26fb5ebadc5020c3fd578c678d97337d1d8afa471bf472c6d5",
                    "enabled": 1
                }
            },
            "wireshark": {
                "100": {
                    "checksum": "sha256:e1b5a6483deac005288672619780282eb889108244815c872e59166ae8df750a",
                    "enabled": 1
                }
            },
            "xen": {
                "100": {
                    "checksum": "sha256:59c9e307dfc787b4b7510d9d593b71792f9f147884418eed41983034fb35059f",
                    "enabled": 1
                }
            },
            "xguest": {
                "100": {
                    "checksum": "sha256:9e7016f3dc0c1a18e4164c5d5aef1e60e440b596ac6976770277ba48f02545df",
                    "enabled": 1
                }
            },
            "xserver": {
                "100": {
                    "checksum": "sha256:9e045e0be2544e2241f3b8132ceb2e1720b20cbe1fb5b017c4a9a97e2b5b8bd2",
                    "enabled": 1
                }
            },
            "zabbix": {
                "100": {
                    "checksum": "sha256:b8950107bebcd555f2c48ee06bc63bb1329a6ea5db7a762cbef5ad9be656c9da",
                    "enabled": 1
                }
            },
            "zarafa": {
                "100": {
                    "checksum": "sha256:b1738ace3c35a58867613fabb433a761136afae86bab322ea4d192436c5b0ddd",
                    "enabled": 1
                }
            },
            "zebra": {
                "100": {
                    "checksum": "sha256:475a888f43d0521ac189ff49430864dc8b00f04c244d42100ca1d637860ff4fc",
                    "enabled": 1
                }
            },
            "zoneminder": {
                "100": {
                    "checksum": "sha256:19a33723d291446ee9617d0120088d7bae884e5a963c48a8afec20fc6bacc4bc",
                    "enabled": 1
                }
            },
            "zosremote": {
                "100": {
                    "checksum": "sha256:804524b0b86a951c8a5b1e2220cf19f3504fd7442e463a8ac8852babedf836c7",
                    "enabled": 1
                }
            }
        },
        "selinux_priorities": true
    },
    "changed": false
}

TASK [fedora.linux_system_roles.selinux : Load SELinux modules] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115
Saturday 08 February 2025  18:38:07 -0500 (0:00:03.686)       0:07:21.619 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "selinux_modules is defined",
    "skip_reason": "Conditional result was False"
}
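
Editor's note: the task above is skipped because the selinux_modules variable is undefined in this run. A minimal sketch of what defining it might look like, assuming the list-of-dicts format documented for the selinux role (the module path and state values here are hypothetical, not taken from this log):

    - hosts: managed-node3
      vars:
        selinux_modules:
          # hypothetical entry: enable a local policy module at the default priority
          - path: /tmp/mymodule.pp
            state: enabled
      roles:
        - fedora.linux_system_roles.selinux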

TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128
Saturday 08 February 2025  18:38:07 -0500 (0:00:00.274)       0:07:21.894 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136
Saturday 08 February 2025  18:38:07 -0500 (0:00:00.053)       0:07:21.948 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.nbde_server : Stat the tangd custom port systemd directory] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:14
Saturday 08 February 2025  18:38:07 -0500 (0:00:00.093)       0:07:22.041 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.nbde_server : Get a list of files in the tangd custom directory] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:19
Saturday 08 February 2025  18:38:08 -0500 (0:00:00.506)       0:07:22.547 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__nbde_server_tangd_dir_stat.stat.exists",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.nbde_server : Manage tangd custom port systemd directory] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:35
Saturday 08 February 2025  18:38:08 -0500 (0:00:00.112)       0:07:22.659 ***** 
changed: [managed-node3] => {
    "changed": true,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/systemd/system/tangd.socket.d",
    "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0",
    "size": 4096,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.nbde_server : Create the file with the port entry that we want tangd to listen on] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44
Saturday 08 February 2025  18:38:09 -0500 (0:00:00.782)       0:07:23.442 ***** 
changed: [managed-node3] => {
    "changed": true,
    "checksum": "cab519df8c21e60fd06ac780e2c7bd41ad441042",
    "dest": "/etc/systemd/system/tangd.socket.d/override.conf",
    "gid": 0,
    "group": "root",
    "md5sum": "fc727969e0bd264a9cc7f9c6bc56714c",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:tangd_unit_file_t:s0",
    "size": 90,
    "src": "/root/.ansible/tmp/ansible-tmp-1739057889.3644252-195568-270541298237609/.source.conf",
    "state": "file",
    "uid": 0
}
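
Editor's note: the 90-byte drop-in written above overrides the port that tangd.socket listens on; a systemctl daemon-reload is still required before it takes effect. A minimal sketch of an equivalent hand-written task, assuming the conventional systemd socket override layout (the port 7500 is a hypothetical placeholder; the actual port value is not shown in this log):

    - name: Override the tangd socket port (sketch)
      ansible.builtin.copy:
        dest: /etc/systemd/system/tangd.socket.d/override.conf
        mode: "0644"
        content: |
          [Socket]
          ListenStream=
          ListenStream=7500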

TASK [fedora.linux_system_roles.nbde_server : Set flag to tell main that the port has changed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:53
Saturday 08 February 2025  18:38:10 -0500 (0:00:01.279)       0:07:24.721 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__nbde_server_port_changed": true
    },
    "changed": false
}

TASK [Ensure the desired port is added to firewalld] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:57
Saturday 08 February 2025  18:38:10 -0500 (0:00:00.097)       0:07:24.819 ***** 
included: fedora.linux_system_roles.firewall for managed-node3
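
Editor's note: the nbde_server role delegates port management to the firewall role here. A minimal sketch of such an invocation, assuming the firewall role's documented list-of-dicts firewall variable (the port value is a hypothetical placeholder; the role would derive the real one from its own port setting):

    - name: Ensure the desired port is added to firewalld (sketch)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall
      vars:
        firewall:
          # hypothetical port value, permanently enabled in the default zone
          - port: "7500/tcp"
            state: enabled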

TASK [fedora.linux_system_roles.firewall : Setup firewalld] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2
Saturday 08 February 2025  18:38:10 -0500 (0:00:00.314)       0:07:25.133 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node3

TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2
Saturday 08 February 2025  18:38:11 -0500 (0:00:00.177)       0:07:25.310 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Check if system is ostree] **********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10
Saturday 08 February 2025  18:38:11 -0500 (0:00:00.188)       0:07:25.499 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15
Saturday 08 February 2025  18:38:11 -0500 (0:00:00.615)       0:07:26.114 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__firewall_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22
Saturday 08 February 2025  18:38:12 -0500 (0:00:00.142)       0:07:26.257 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27
Saturday 08 February 2025  18:38:12 -0500 (0:00:00.579)       0:07:26.836 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__firewall_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Install firewalld] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
Saturday 08 February 2025  18:38:12 -0500 (0:00:00.063)       0:07:26.900 ***** 
ok: [managed-node3] => {
    "changed": false,
    "rc": 0,
    "results": []
}

MSG:

Nothing to do
lsrpackages: firewalld

TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43
Saturday 08 February 2025  18:38:14 -0500 (0:00:01.451)       0:07:28.351 ***** 
skipping: [managed-node3] => {
    "false_condition": "__firewall_is_transactional | d(false)"
}

TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48
Saturday 08 February 2025  18:38:14 -0500 (0:00:00.123)       0:07:28.475 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53
Saturday 08 February 2025  18:38:14 -0500 (0:00:00.108)       0:07:28.584 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_is_transactional | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Collect service facts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5
Saturday 08 February 2025  18:38:14 -0500 (0:00:00.077)       0:07:28.662 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9
Saturday 08 February 2025  18:38:14 -0500 (0:00:00.094)       0:07:28.756 ***** 
skipping: [managed-node3] => (item=nftables)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "nftables",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=iptables)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "iptables",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=ufw)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "firewall_disable_conflicting_services | bool",
    "item": "ufw",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped
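
Editor's note: all three items are skipped because firewall_disable_conflicting_services defaults to false. Opting in would make the role attempt to stop and disable each listed service; a sketch of that opt-in (not taken from this run):

    - hosts: managed-node3
      vars:
        firewall_disable_conflicting_services: true  # stop/disable nftables, iptables, ufw
      roles:
        - fedora.linux_system_roles.firewall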

TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22
Saturday 08 February 2025  18:38:14 -0500 (0:00:00.091)       0:07:28.847 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": "firewalld",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "dbus-broker.service polkit.service dbus.socket system.slice sysinit.target basic.target",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target network-pre.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedoraproject.FirewallD1",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "yes",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "ip6tables.service shutdown.target ipset.service iptables.service ebtables.service",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "firewalld - dynamic firewall daemon",
        "DeviceAllow": "char-rtc r",
        "DevicePolicy": "closed",
        "Documentation": "\"man:firewalld(1)\"",
        "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3893915648",
        "EffectiveMemoryMax": "3893915648",
        "EffectiveTasksMax": "4417",
        "Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
        "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/firewalld.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "firewalld.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "14724",
        "LimitNPROCSoft": "14724",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "14724",
        "LimitSIGPENDINGSoft": "14724",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "yes",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3335819264",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "yes",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "firewalld.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "yes",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "yes",
        "ProtectControlGroups": "yes",
        "ProtectHome": "yes",
        "ProtectHostname": "yes",
        "ProtectKernelLogs": "yes",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "yes",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "sysinit.target dbus.socket system.slice",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "yes",
        "RestrictSUIDSGID": "yes",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "StandardError": "null",
        "StandardInput": "null",
        "StandardOutput": "null",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallArchitectures": "native",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "4417",
        "TimeoutAbortUSec": "45s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "45s",
        "TimeoutStopFailureMode": "abort",
        "TimeoutStopUSec": "45s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "Wants": "network-pre.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}

TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
Saturday 08 February 2025  18:38:15 -0500 (0:00:00.650)       0:07:29.498 ***** 
changed: [managed-node3] => {
    "changed": true,
    "enabled": true,
    "name": "firewalld",
    "state": "started",
    "status": {
        "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "dbus.socket dbus-broker.service basic.target sysinit.target system.slice polkit.service",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Before": "shutdown.target network-pre.target",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "BusName": "org.fedoraproject.FirewallD1",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "yes",
        "CanIsolate": "no",
        "CanReload": "yes",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CleanResult": "success",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "ipset.service iptables.service shutdown.target ebtables.service ip6tables.service",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "Delegate": "no",
        "Description": "firewalld - dynamic firewall daemon",
        "DeviceAllow": "char-rtc r",
        "DevicePolicy": "closed",
        "Documentation": "\"man:firewalld(1)\"",
        "DropInPaths": "/usr/lib/systemd/system/service.d/10-timeout-abort.conf /usr/lib/systemd/system/service.d/50-keep-warm.conf",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3893915648",
        "EffectiveMemoryMax": "3893915648",
        "EffectiveTasksMax": "4417",
        "Environment": "SYSTEMD_SLEEP_FREEZE_USER_SESSIONS=0",
        "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)",
        "ExecMainCode": "0",
        "ExecMainExitTimestampMonotonic": "0",
        "ExecMainHandoffTimestampMonotonic": "0",
        "ExecMainPID": "0",
        "ExecMainStartTimestampMonotonic": "0",
        "ExecMainStatus": "0",
        "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExitType": "main",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorStoreMax": "0",
        "FileDescriptorStorePreserve": "restart",
        "FinalKillSignal": "9",
        "FragmentPath": "/usr/lib/systemd/system/firewalld.service",
        "FreezerState": "running",
        "GID": "[not set]",
        "GuessMainPID": "yes",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "Id": "firewalld.service",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeyringMode": "private",
        "KillMode": "mixed",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "14724",
        "LimitNPROCSoft": "14724",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "14724",
        "LimitSIGPENDINGSoft": "14724",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "LoadState": "loaded",
        "LockPersonality": "yes",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "MainPID": "0",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3326832640",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "yes",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NFileDescriptorStore": "0",
        "NRestarts": "0",
        "NUMAPolicy": "n/a",
        "Names": "firewalld.service",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "NotifyAccess": "none",
        "OOMPolicy": "stop",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "Perpetual": "no",
        "PrivateDevices": "yes",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "yes",
        "ProtectControlGroups": "yes",
        "ProtectHome": "yes",
        "ProtectHostname": "yes",
        "ProtectKernelLogs": "yes",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "yes",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "ReloadResult": "success",
        "ReloadSignal": "1",
        "RemainAfterExit": "no",
        "RemoveIPC": "no",
        "Requires": "system.slice sysinit.target dbus.socket",
        "Restart": "no",
        "RestartKillSignal": "15",
        "RestartMaxDelayUSec": "infinity",
        "RestartMode": "normal",
        "RestartSteps": "0",
        "RestartUSec": "100ms",
        "RestartUSecNext": "100ms",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "yes",
        "RestrictSUIDSGID": "yes",
        "Result": "success",
        "RootDirectoryStartOnly": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "RuntimeMaxUSec": "infinity",
        "RuntimeRandomizedExtraUSec": "0",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "StandardError": "null",
        "StandardInput": "null",
        "StandardOutput": "null",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StatusErrno": "0",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallArchitectures": "native",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "4417",
        "TimeoutAbortUSec": "45s",
        "TimeoutCleanUSec": "infinity",
        "TimeoutStartFailureMode": "terminate",
        "TimeoutStartUSec": "45s",
        "TimeoutStopFailureMode": "abort",
        "TimeoutStopUSec": "45s",
        "TimerSlackNSec": "50000",
        "Transient": "no",
        "Type": "dbus",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "enabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "Wants": "network-pre.target",
        "WatchdogSignal": "6",
        "WatchdogTimestampMonotonic": "0",
        "WatchdogUSec": "infinity"
    }
}

TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34
Saturday 08 February 2025  18:38:16 -0500 (0:00:01.253)       0:07:30.751 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__firewall_previous_replaced": false,
        "__firewall_python_cmd": "/usr/bin/python3",
        "__firewall_report_changed": true
    },
    "changed": false
}

TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43
Saturday 08 February 2025  18:38:16 -0500 (0:00:00.295)       0:07:31.047 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55
Saturday 08 February 2025  18:38:16 -0500 (0:00:00.112)       0:07:31.160 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Configure firewall] *****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Saturday 08 February 2025  18:38:17 -0500 (0:00:00.110)       0:07:31.271 ***** 
changed: [managed-node3] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True}) => {
    "__firewall_changed": true,
    "ansible_loop_var": "item",
    "changed": true,
    "item": {
        "immediate": true,
        "permanent": true,
        "port": "7500/tcp",
        "state": "enabled",
        "zone": "public"
    }
}
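
The changed item above opens TCP port 7500 (the port the Tang socket is bound to below) in the public zone, both in the running firewalld and in its permanent configuration. A play shaped roughly like the following minimal sketch would drive the firewall role to this result; the play name and host pattern are illustrative assumptions, while the variable keys mirror the loop item recorded in the log:

    - name: Open the Tang port via the firewall role (illustrative sketch)
      hosts: managed-node3
      roles:
        - role: fedora.linux_system_roles.firewall
          vars:
            firewall:
              - port: 7500/tcp     # matches the loop item above
                zone: public
                state: enabled
                immediate: true    # change the running firewalld now
                permanent: true    # persist across firewalld restarts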

TASK [fedora.linux_system_roles.firewall : Gather firewall config information] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120
Saturday 08 February 2025  18:38:17 -0500 (0:00:00.864)       0:07:32.136 ***** 
skipping: [managed-node3] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True})  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "'detailed' in fw[0]",
    "item": {
        "immediate": true,
        "permanent": true,
        "port": "7500/tcp",
        "state": "enabled",
        "zone": "public"
    },
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => {
    "changed": false
}

MSG:

All items skipped

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130
Saturday 08 February 2025  18:38:18 -0500 (0:00:00.132)       0:07:32.268 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "'detailed' in fw[0]",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139
Saturday 08 February 2025  18:38:18 -0500 (0:00:00.131)       0:07:32.400 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall == None or firewall | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144
Saturday 08 February 2025  18:38:18 -0500 (0:00:00.119)       0:07:32.519 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "firewall == None or firewall | length == 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153
Saturday 08 February 2025  18:38:18 -0500 (0:00:00.128)       0:07:32.647 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Calculate what has changed] *********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163
Saturday 08 February 2025  18:38:18 -0500 (0:00:00.104)       0:07:32.752 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__firewall_previous_replaced | bool",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.firewall : Show diffs] *************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169
Saturday 08 February 2025  18:38:18 -0500 (0:00:00.076)       0:07:32.828 ***** 
skipping: [managed-node3] => {
    "false_condition": "__firewall_previous_replaced | bool"
}

TASK [fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34
Saturday 08 February 2025  18:38:18 -0500 (0:00:00.140)       0:07:32.969 ***** 
ok: [managed-node3] => {
    "changed": false,
    "name": null,
    "status": {}
}

TASK [fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39
Saturday 08 February 2025  18:38:19 -0500 (0:00:00.914)       0:07:33.883 ***** 
changed: [managed-node3] => (item=tangd.socket) => {
    "ansible_loop_var": "item",
    "changed": true,
    "enabled": true,
    "item": "tangd.socket",
    "name": "tangd.socket",
    "state": "started",
    "status": {
        "Accept": "yes",
        "AccessSELinuxContext": "system_u:object_r:tangd_unit_file_t:s0",
        "ActiveEnterTimestampMonotonic": "0",
        "ActiveExitTimestampMonotonic": "0",
        "ActiveState": "inactive",
        "After": "system.slice sysinit.target systemd-journald.socket",
        "AllowIsolate": "no",
        "AssertResult": "no",
        "AssertTimestampMonotonic": "0",
        "Backlog": "2147483647",
        "Before": "shutdown.target sockets.target",
        "BindIPv6Only": "default",
        "BlockIOAccounting": "no",
        "BlockIOWeight": "[not set]",
        "Broadcast": "no",
        "CPUAccounting": "yes",
        "CPUAffinityFromNUMA": "no",
        "CPUQuotaPerSecUSec": "infinity",
        "CPUQuotaPeriodUSec": "infinity",
        "CPUSchedulingPolicy": "0",
        "CPUSchedulingPriority": "0",
        "CPUSchedulingResetOnFork": "no",
        "CPUShares": "[not set]",
        "CPUUsageNSec": "[not set]",
        "CPUWeight": "[not set]",
        "CacheDirectoryMode": "0755",
        "CanFreeze": "no",
        "CanIsolate": "no",
        "CanReload": "no",
        "CanStart": "yes",
        "CanStop": "yes",
        "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore",
        "CollectMode": "inactive",
        "ConditionResult": "no",
        "ConditionTimestampMonotonic": "0",
        "ConfigurationDirectoryMode": "0755",
        "Conflicts": "shutdown.target",
        "ControlGroupId": "0",
        "ControlPID": "0",
        "CoredumpFilter": "0x33",
        "CoredumpReceive": "no",
        "DefaultDependencies": "yes",
        "DefaultMemoryLow": "0",
        "DefaultMemoryMin": "0",
        "DefaultStartupMemoryLow": "0",
        "DeferAcceptUSec": "0",
        "Delegate": "no",
        "Description": "Tang Server socket",
        "DevicePolicy": "auto",
        "DirectoryMode": "0755",
        "Documentation": "\"man:tang(8)\"",
        "DropInPaths": "/etc/systemd/system/tangd.socket.d/override.conf",
        "DynamicUser": "no",
        "EffectiveMemoryHigh": "3893915648",
        "EffectiveMemoryMax": "3893915648",
        "EffectiveTasksMax": "4417",
        "ExecStartPre": "{ path=/usr/bin/chown ; argv[]=/usr/bin/chown -R tang:tang /var/db/tang ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
        "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "FailureAction": "none",
        "FileDescriptorName": "tangd.socket",
        "FinalKillSignal": "9",
        "FlushPending": "no",
        "FragmentPath": "/usr/lib/systemd/system/tangd.socket",
        "FreeBind": "no",
        "FreezerState": "running",
        "GID": "[not set]",
        "IOAccounting": "no",
        "IOReadBytes": "[not set]",
        "IOReadOperations": "[not set]",
        "IOSchedulingClass": "2",
        "IOSchedulingPriority": "4",
        "IOWeight": "[not set]",
        "IOWriteBytes": "[not set]",
        "IOWriteOperations": "[not set]",
        "IPAccounting": "no",
        "IPEgressBytes": "[no data]",
        "IPEgressPackets": "[no data]",
        "IPIngressBytes": "[no data]",
        "IPIngressPackets": "[no data]",
        "IPTOS": "-1",
        "IPTTL": "-1",
        "Id": "tangd.socket",
        "IgnoreOnIsolate": "no",
        "IgnoreSIGPIPE": "yes",
        "InactiveEnterTimestampMonotonic": "0",
        "InactiveExitTimestampMonotonic": "0",
        "JobRunningTimeoutUSec": "infinity",
        "JobTimeoutAction": "none",
        "JobTimeoutUSec": "infinity",
        "KeepAlive": "no",
        "KeepAliveIntervalUSec": "0",
        "KeepAliveProbes": "0",
        "KeepAliveTimeUSec": "0",
        "KeyringMode": "shared",
        "KillMode": "control-group",
        "KillSignal": "15",
        "LimitAS": "infinity",
        "LimitASSoft": "infinity",
        "LimitCORE": "infinity",
        "LimitCORESoft": "infinity",
        "LimitCPU": "infinity",
        "LimitCPUSoft": "infinity",
        "LimitDATA": "infinity",
        "LimitDATASoft": "infinity",
        "LimitFSIZE": "infinity",
        "LimitFSIZESoft": "infinity",
        "LimitLOCKS": "infinity",
        "LimitLOCKSSoft": "infinity",
        "LimitMEMLOCK": "8388608",
        "LimitMEMLOCKSoft": "8388608",
        "LimitMSGQUEUE": "819200",
        "LimitMSGQUEUESoft": "819200",
        "LimitNICE": "0",
        "LimitNICESoft": "0",
        "LimitNOFILE": "524288",
        "LimitNOFILESoft": "1024",
        "LimitNPROC": "14724",
        "LimitNPROCSoft": "14724",
        "LimitRSS": "infinity",
        "LimitRSSSoft": "infinity",
        "LimitRTPRIO": "0",
        "LimitRTPRIOSoft": "0",
        "LimitRTTIME": "infinity",
        "LimitRTTIMESoft": "infinity",
        "LimitSIGPENDING": "14724",
        "LimitSIGPENDINGSoft": "14724",
        "LimitSTACK": "infinity",
        "LimitSTACKSoft": "8388608",
        "Listen": "[::]:7500 (Stream)",
        "LoadState": "loaded",
        "LockPersonality": "no",
        "LogLevelMax": "-1",
        "LogRateLimitBurst": "0",
        "LogRateLimitIntervalUSec": "0",
        "LogsDirectoryMode": "0755",
        "ManagedOOMMemoryPressure": "auto",
        "ManagedOOMMemoryPressureLimit": "0",
        "ManagedOOMPreference": "none",
        "ManagedOOMSwap": "auto",
        "Mark": "-1",
        "MaxConnections": "64",
        "MaxConnectionsPerSource": "0",
        "MemoryAccounting": "yes",
        "MemoryAvailable": "3331543040",
        "MemoryCurrent": "[not set]",
        "MemoryDenyWriteExecute": "no",
        "MemoryHigh": "infinity",
        "MemoryKSM": "no",
        "MemoryLimit": "infinity",
        "MemoryLow": "0",
        "MemoryMax": "infinity",
        "MemoryMin": "0",
        "MemoryPeak": "[not set]",
        "MemoryPressureThresholdUSec": "200ms",
        "MemoryPressureWatch": "auto",
        "MemorySwapCurrent": "[not set]",
        "MemorySwapMax": "infinity",
        "MemorySwapPeak": "[not set]",
        "MemoryZSwapCurrent": "[not set]",
        "MemoryZSwapMax": "infinity",
        "MemoryZSwapWriteback": "yes",
        "MessageQueueMaxMessages": "0",
        "MessageQueueMessageSize": "0",
        "MountAPIVFS": "no",
        "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "NAccepted": "0",
        "NConnections": "0",
        "NRefused": "0",
        "NUMAPolicy": "n/a",
        "Names": "tangd.socket",
        "NeedDaemonReload": "no",
        "Nice": "0",
        "NoDelay": "no",
        "NoNewPrivileges": "no",
        "NonBlocking": "no",
        "OOMScoreAdjust": "0",
        "OnFailureJobMode": "replace",
        "OnSuccessJobMode": "fail",
        "PassCredentials": "no",
        "PassFileDescriptorsToExec": "no",
        "PassPacketInfo": "no",
        "PassSecurity": "no",
        "Perpetual": "no",
        "PipeSize": "0",
        "PollLimitBurst": "150",
        "PollLimitIntervalUSec": "2s",
        "Priority": "-1",
        "PrivateDevices": "no",
        "PrivateIPC": "no",
        "PrivateMounts": "no",
        "PrivateNetwork": "no",
        "PrivateTmp": "no",
        "PrivateUsers": "no",
        "ProcSubset": "all",
        "ProtectClock": "no",
        "ProtectControlGroups": "no",
        "ProtectHome": "no",
        "ProtectHostname": "no",
        "ProtectKernelLogs": "no",
        "ProtectKernelModules": "no",
        "ProtectKernelTunables": "no",
        "ProtectProc": "default",
        "ProtectSystem": "no",
        "ReceiveBuffer": "0",
        "RefuseManualStart": "no",
        "RefuseManualStop": "no",
        "RemoveIPC": "no",
        "RemoveOnStop": "no",
        "Requires": "sysinit.target system.slice",
        "RestartKillSignal": "15",
        "RestrictNamespaces": "no",
        "RestrictRealtime": "no",
        "RestrictSUIDSGID": "no",
        "Result": "success",
        "ReusePort": "no",
        "RootEphemeral": "no",
        "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent",
        "RuntimeDirectoryMode": "0755",
        "RuntimeDirectoryPreserve": "no",
        "SameProcessGroup": "no",
        "SecureBits": "0",
        "SendBuffer": "0",
        "SendSIGHUP": "no",
        "SendSIGKILL": "yes",
        "SetLoginEnvironment": "no",
        "Slice": "system.slice",
        "SocketMode": "0666",
        "SocketProtocol": "0",
        "StandardError": "inherit",
        "StandardInput": "null",
        "StandardOutput": "journal",
        "StartLimitAction": "none",
        "StartLimitBurst": "5",
        "StartLimitIntervalUSec": "10s",
        "StartupBlockIOWeight": "[not set]",
        "StartupCPUShares": "[not set]",
        "StartupCPUWeight": "[not set]",
        "StartupIOWeight": "[not set]",
        "StartupMemoryHigh": "infinity",
        "StartupMemoryLow": "0",
        "StartupMemoryMax": "infinity",
        "StartupMemorySwapMax": "infinity",
        "StartupMemoryZSwapMax": "infinity",
        "StateChangeTimestampMonotonic": "0",
        "StateDirectoryMode": "0755",
        "StopWhenUnneeded": "no",
        "SubState": "dead",
        "SuccessAction": "none",
        "SurviveFinalKillSignal": "no",
        "SyslogFacility": "3",
        "SyslogLevel": "6",
        "SyslogLevelPrefix": "yes",
        "SyslogPriority": "30",
        "SystemCallErrorNumber": "2147483646",
        "TTYReset": "no",
        "TTYVHangup": "no",
        "TTYVTDisallocate": "no",
        "TasksAccounting": "yes",
        "TasksCurrent": "[not set]",
        "TasksMax": "4417",
        "TimeoutCleanUSec": "infinity",
        "TimeoutUSec": "45s",
        "TimerSlackNSec": "50000",
        "Timestamping": "off",
        "Transient": "no",
        "Transparent": "no",
        "TriggerLimitBurst": "200",
        "TriggerLimitIntervalUSec": "2s",
        "UID": "[not set]",
        "UMask": "0022",
        "UnitFilePreset": "disabled",
        "UnitFileState": "disabled",
        "UtmpMode": "init",
        "WatchdogSignal": "6",
        "Writable": "no"
    }
}
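
Two fields in this status are worth noting: Listen is "[::]:7500 (Stream)" rather than tang's stock port, and DropInPaths points at /etc/systemd/system/tangd.socket.d/override.conf, meaning the nbde_server role has relocated the socket with a systemd drop-in. A drop-in producing that Listen value, deployed by hand, could look like the following sketch; the task layout and file content are assumptions, not the role's actual implementation:

    - name: Relocate tangd.socket to port 7500 (illustrative sketch)
      hosts: managed-node3
      tasks:
        - name: Install a drop-in overriding the listen address
          ansible.builtin.copy:
            dest: /etc/systemd/system/tangd.socket.d/override.conf
            content: |
              [Socket]
              ListenStream=        # empty assignment resets the unit's default port
              ListenStream=7500
            mode: "0644"
          notify: Reload systemd

      handlers:
        - name: Reload systemd
          ansible.builtin.systemd_service:
            daemon_reload: true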

TASK [Create encrypted Stratis pool with Clevis/Tang] **************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:240
Saturday 08 February 2025  18:38:20 -0500 (0:00:01.085)       0:07:34.969 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:38:20 -0500 (0:00:00.169)       0:07:35.138 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.086)       0:07:35.225 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.083)       0:07:35.308 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.121)       0:07:35.430 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.059)       0:07:35.489 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.061)       0:07:35.551 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.094)       0:07:35.645 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.067)       0:07:35.713 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.214)       0:07:35.928 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.061)       0:07:35.989 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": "sda",
            "encryption": true,
            "encryption_clevis_pin": "tang",
            "encryption_password": "yabbadabbadoo",
            "encryption_tang_url": "localhost:7500",
            "name": "foo",
            "type": "stratis"
        }
    ]
}
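
This is the pool specification the test hands to the role: a Stratis pool named foo on sda, encrypted, with the encryption bound to the Tang server just published on localhost:7500. Expressed as a standalone play, the same request would look roughly like this sketch (the password is the throwaway test value from the log; real playbooks should pull it from Ansible Vault):

    - name: Create an encrypted Stratis pool bound to Tang (illustrative sketch)
      hosts: managed-node3
      roles:
        - role: fedora.linux_system_roles.storage
          vars:
            storage_pools:
              - name: foo
                type: stratis
                disks: [sda]
                encryption: true
                encryption_password: yabbadabbadoo   # test-only value; vault it in practice
                encryption_clevis_pin: tang
                encryption_tang_url: localhost:7500  # the socket enabled above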

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:38:21 -0500 (0:00:00.106)       0:07:36.096 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:38:22 -0500 (0:00:00.100)       0:07:36.196 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:38:22 -0500 (0:00:00.065)       0:07:36.261 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:38:22 -0500 (0:00:00.074)       0:07:36.336 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:38:22 -0500 (0:00:00.058)       0:07:36.394 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:38:22 -0500 (0:00:00.060)       0:07:36.454 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:38:22 -0500 (0:00:00.089)       0:07:36.543 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:38:22 -0500 (0:00:00.049)       0:07:36.593 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "create format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        },
        {
            "action": "create device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0",
        "/dev/stratis/foo"
    ],
    "mounts": [],
    "packages": [
        "stratis-cli",
        "stratisd",
        "e2fsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": "tang",
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": "localhost:7500",
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ],
    "volumes": []
}

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:38:39 -0500 (0:00:17.522)       0:07:54.115 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:38:40 -0500 (0:00:00.117)       0:07:54.233 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057789.5975058,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "58c9cf35b6a5bb13136caa97ec2cf1f888ff31f6",
        "ctime": 1739057789.5965059,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057789.5965059,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:38:40 -0500 (0:00:00.486)       0:07:54.719 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:38:41 -0500 (0:00:00.497)       0:07:55.216 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:38:41 -0500 (0:00:00.080)       0:07:55.297 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "create format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            },
            {
                "action": "create device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0",
            "/dev/stratis/foo"
        ],
        "mounts": [],
        "packages": [
            "stratis-cli",
            "stratisd",
            "e2fsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": "tang",
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": "localhost:7500",
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:38:41 -0500 (0:00:00.102)       0:07:55.399 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": "tang",
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": "localhost:7500",
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "present",
                "type": "stratis",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:38:41 -0500 (0:00:00.098)       0:07:55.498 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:38:41 -0500 (0:00:00.101)       0:07:55.600 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:38:41 -0500 (0:00:00.133)       0:07:55.734 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:38:41 -0500 (0:00:00.110)       0:07:55.844 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:38:41 -0500 (0:00:00.121)       0:07:55.965 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:38:41 -0500 (0:00:00.133)       0:07:56.098 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:38:42 -0500 (0:00:00.095)       0:07:56.194 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:38:42 -0500 (0:00:00.655)       0:07:56.850 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:38:42 -0500 (0:00:00.130)       0:07:56.980 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:253
Saturday 08 February 2025  18:38:44 -0500 (0:00:01.308)       0:07:58.289 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:38:44 -0500 (0:00:00.202)       0:07:58.491 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": "tang",
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": "localhost:7500",
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "present",
            "type": "stratis",
            "volumes": []
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:38:44 -0500 (0:00:00.118)       0:07:58.609 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:38:44 -0500 (0:00:00.111)       0:07:58.721 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-flex-mdv": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-flex-mdv",
            "size": "512M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-flex-thindata": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-flex-thindata",
            "size": "9.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-flex-thinmeta": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-flex-thinmeta",
            "size": "6M",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-physical-originsub": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-physical-originsub",
            "size": "10G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-thinpool-pool": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-b3a7dc18a57e4270ab1cfc5e66105e1c-thinpool-pool",
            "size": "9.5G",
            "type": "stratis",
            "uuid": ""
        },
        "/dev/mapper/stratis-1-private-b9bfcf25754545a586cb22f859497f70-crypt": {
            "fstype": "stratis",
            "label": "",
            "mountpoint": "",
            "name": "/dev/mapper/stratis-1-private-b9bfcf25754545a586cb22f859497f70-crypt",
            "size": "10G",
            "type": "crypt",
            "uuid": "b9bfcf25-7545-45a5-86cb-22f859497f70"
        },
        "/dev/sda": {
            "fstype": "crypto_LUKS",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": "c0edbcc0-1245-4287-b861-6eea531b1a82"
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
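
The per-device map above (fstype, label, mountpoint, size, type, uuid) can be reproduced outside the test harness; a rough equivalent using lsblk, not the test's own info-gathering module:

    - name: Collect comparable block device info (sketch)
      ansible.builtin.command: lsblk -p -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: storage_test_lsblk  # assumed register name
      changed_when: false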

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:38:45 -0500 (0:00:00.620)       0:07:59.342 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:00.003531",
    "end": "2025-02-08 18:38:45.640282",
    "rc": 0,
    "start": "2025-02-08 18:38:45.636751"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
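
Since the pool defines no volumes, the only role-managed artifact expected in /etc/fstab is the '# system_role:storage' fingerprint line at the top. A sketch of a check that no Stratis mounts leaked into fstab; the grep approach is an assumption, not the test's own logic:

    - name: Ensure no Stratis entries are present in fstab (sketch)
      ansible.builtin.command: grep /dev/stratis /etc/fstab
      register: storage_test_fstab_stratis  # assumed register name
      changed_when: false
      failed_when: storage_test_fstab_stratis.rc == 0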

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:38:45 -0500 (0:00:00.645)       0:07:59.987 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003314",
    "end": "2025-02-08 18:38:46.293625",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:38:46.290311"
}

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:38:46 -0500 (0:00:00.642)       0:08:00.630 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': 'tang', 'encryption_tang_url': 'localhost:7500', 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': []})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:38:46 -0500 (0:00:00.263)       0:08:00.893 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:38:46 -0500 (0:00:00.131)       0:08:01.024 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:38:46 -0500 (0:00:00.140)       0:08:01.165 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:38:47 -0500 (0:00:00.142)       0:08:01.307 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:38:47 -0500 (0:00:00.301)       0:08:01.608 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:38:47 -0500 (0:00:00.112)       0:08:01.721 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:38:47 -0500 (0:00:00.095)       0:08:01.816 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:38:47 -0500 (0:00:00.157)       0:08:01.974 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:38:47 -0500 (0:00:00.112)       0:08:02.086 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:38:48 -0500 (0:00:00.151)       0:08:02.238 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:38:48 -0500 (0:00:00.097)       0:08:02.335 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:38:48 -0500 (0:00:00.112)       0:08:02.448 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:38:48 -0500 (0:00:00.111)       0:08:02.559 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:38:48 -0500 (0:00:00.077)       0:08:02.637 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

TASK [Verify that PVs fill their whole devices when they should] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:38:49 -0500 (0:00:00.618)       0:08:03.256 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:38:49 -0500 (0:00:00.131)       0:08:03.387 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:38:49 -0500 (0:00:00.381)       0:08:03.769 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:38:49 -0500 (0:00:00.097)       0:08:03.866 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:38:49 -0500 (0:00:00.095)       0:08:03.962 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:38:49 -0500 (0:00:00.074)       0:08:04.037 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:38:49 -0500 (0:00:00.069)       0:08:04.107 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:38:49 -0500 (0:00:00.063)       0:08:04.170 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.055)       0:08:04.225 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.060)       0:08:04.286 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.056)       0:08:04.342 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.085)       0:08:04.428 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.095)       0:08:04.524 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.105)       0:08:04.629 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.166)       0:08:04.796 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.067)       0:08:04.863 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.185)       0:08:05.049 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:38:50 -0500 (0:00:00.082)       0:08:05.132 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:38:51 -0500 (0:00:00.192)       0:08:05.324 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "1",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:38:51 -0500 (0:00:00.112)       0:08:05.437 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:38:51 -0500 (0:00:00.077)       0:08:05.514 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:38:51 -0500 (0:00:00.073)       0:08:05.588 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:38:51 -0500 (0:00:00.100)       0:08:05.688 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:38:51 -0500 (0:00:00.208)       0:08:05.897 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:38:51 -0500 (0:00:00.260)       0:08:06.158 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:38:52 -0500 (0:00:00.277)       0:08:06.435 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.376730",
    "end": "2025-02-08 18:38:53.126157",
    "rc": 0,
    "start": "2025-02-08 18:38:52.749427"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [
        {
            "available_actions": "fully_operational",
            "blockdevs": {
                "cachedevs": [],
                "datadevs": [
                    {
                        "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                        "clevis_config": {
                            "thp": "BrX8Um5O1igqdYb_TyASxhpEvBkNlNGIRKB8DKT9888",
                            "url": "localhost:7500"
                        },
                        "clevis_pin": "tang",
                        "in_use": true,
                        "key_description": "blivet-foo",
                        "path": "/dev/sda",
                        "size": "20938752 sectors",
                        "uuid": "b9bfcf25-7545-45a5-86cb-22f859497f70"
                    }
                ]
            },
            "filesystems": [],
            "fs_limit": 100,
            "name": "foo",
            "uuid": "b3a7dc18-a57e-4270-ab1c-fc5e66105e1c"
        }
    ],
    "stopped_pools": []
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:38:53 -0500 (0:00:01.045)       0:08:07.481 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [
                {
                    "available_actions": "fully_operational",
                    "blockdevs": {
                        "cachedevs": [],
                        "datadevs": [
                            {
                                "blksizes": "base: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes, crypt: BLKSSSZGET: 512 bytes, BLKPBSZGET: 512 bytes",
                                "clevis_config": {
                                    "thp": "BrX8Um5O1igqdYb_TyASxhpEvBkNlNGIRKB8DKT9888",
                                    "url": "localhost:7500"
                                },
                                "clevis_pin": "tang",
                                "in_use": true,
                                "key_description": "blivet-foo",
                                "path": "/dev/sda",
                                "size": "20938752 sectors",
                                "uuid": "b9bfcf25-7545-45a5-86cb-22f859497f70"
                            }
                        ]
                    },
                    "filesystems": [],
                    "fs_limit": 100,
                    "name": "foo",
                    "uuid": "b3a7dc18-a57e-4270-ab1c-fc5e66105e1c"
                }
            ],
            "stopped_pools": []
        }
    },
    "changed": false
}

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:38:53 -0500 (0:00:00.191)       0:08:07.673 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:38:53 -0500 (0:00:00.151)       0:08:07.824 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:38:53 -0500 (0:00:00.213)       0:08:08.038 ***** 
ok: [managed-node3] => {
    "changed": false
}

MSG:

All assertions passed
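
The Clevis/Tang verification above reduces to assertions over fields visible in the stratis report JSON; a minimal sketch, assuming _stratis_pool_info is populated as in the "Get information about Stratis" task (the exact assertions in verify-pool-stratis.yml may differ):

    - name: Verify Clevis/Tang settings on the data device (sketch)
      ansible.builtin.assert:
        that:
          - _stratis_pool_info.pools[0].blockdevs.datadevs[0].clevis_pin == 'tang'
          - _stratis_pool_info.pools[0].blockdevs.datadevs[0].clevis_config.url == 'localhost:7500'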

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:38:54 -0500 (0:00:00.180)       0:08:08.218 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:38:54 -0500 (0:00:00.124)       0:08:08.344 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:38:54 -0500 (0:00:00.126)       0:08:08.471 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:38:54 -0500 (0:00:00.172)       0:08:08.644 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:38:54 -0500 (0:00:00.133)       0:08:08.778 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

TASK [Clean up] ****************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:256
Saturday 08 February 2025  18:38:54 -0500 (0:00:00.141)       0:08:08.919 ***** 
included: fedora.linux_system_roles.storage for managed-node3

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 08 February 2025  18:38:55 -0500 (0:00:00.403)       0:08:09.323 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 08 February 2025  18:38:55 -0500 (0:00:00.245)       0:08:09.568 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 08 February 2025  18:38:55 -0500 (0:00:00.201)       0:08:09.770 ***** 
skipping: [managed-node3] => (item=RedHat.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node3] => (item=Fedora.yml) => {
    "ansible_facts": {
        "_storage_copr_support_packages": [
            "dnf-plugins-core"
        ],
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_architecture == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/vars/Fedora.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "Fedora.yml"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node3] => (item=Fedora_41.yml)  => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "Fedora_41.yml",
    "skip_reason": "Conditional result was False"
}
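
The blivet_package_list loaded from Fedora.yml above is what the role's "Make sure blivet is available" step installs when it is not skipped; a minimal sketch of such an install, assuming the list variable is already set (the role's actual task differs in detail):

    - name: Install the storage provider packages (sketch)
      ansible.builtin.package:
        name: "{{ blivet_package_list }}"
        state: present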

TASK [fedora.linux_system_roles.storage : Check if system is ostree] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25
Saturday 08 February 2025  18:38:55 -0500 (0:00:00.302)       0:08:10.072 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30
Saturday 08 February 2025  18:38:56 -0500 (0:00:00.155)       0:08:10.228 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "not __storage_is_ostree is defined",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5
Saturday 08 February 2025  18:38:56 -0500 (0:00:00.145)       0:08:10.373 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9
Saturday 08 February 2025  18:38:56 -0500 (0:00:00.122)       0:08:10.496 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13
Saturday 08 February 2025  18:38:56 -0500 (0:00:00.140)       0:08:10.637 ***** 
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node3

TASK [fedora.linux_system_roles.storage : Make sure blivet is available] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2
Saturday 08 February 2025  18:38:56 -0500 (0:00:00.263)       0:08:10.900 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Show storage_pools] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9
Saturday 08 February 2025  18:38:56 -0500 (0:00:00.198)       0:08:11.099 ***** 
ok: [managed-node3] => {
    "storage_pools": [
        {
            "disks": "sda",
            "name": "foo",
            "state": "absent",
            "type": "stratis"
        }
    ]
}
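
The cleanup pass drives the role with the same pool name but state: absent; reconstructed from the values printed above (the task wrapper is an assumption):

    - name: Remove the Stratis pool (sketch)
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: sda
            state: absent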

TASK [fedora.linux_system_roles.storage : Show storage_volumes] ****************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14
Saturday 08 February 2025  18:38:57 -0500 (0:00:00.423)       0:08:11.522 ***** 
ok: [managed-node3] => {
    "storage_volumes": "VARIABLE IS NOT DEFINED!: 'storage_volumes' is undefined"
}

TASK [fedora.linux_system_roles.storage : Get required packages] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19
Saturday 08 February 2025  18:38:57 -0500 (0:00:00.114)       0:08:11.636 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:31
Saturday 08 February 2025  18:38:57 -0500 (0:00:00.103)       0:08:11.740 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37
Saturday 08 February 2025  18:38:57 -0500 (0:00:00.124)       0:08:11.864 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Get service facts] *******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51
Saturday 08 February 2025  18:38:57 -0500 (0:00:00.109)       0:08:11.973 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_skip_checks is not defined or not \"service_facts\" in storage_skip_checks",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] *****
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:57
Saturday 08 February 2025  18:38:57 -0500 (0:00:00.098)       0:08:12.072 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_cryptsetup_services": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:63
Saturday 08 February 2025  18:38:58 -0500 (0:00:00.200)       0:08:12.272 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69
Saturday 08 February 2025  18:38:58 -0500 (0:00:00.146)       0:08:12.419 ***** 
changed: [managed-node3] => {
    "actions": [
        {
            "action": "destroy device",
            "device": "/dev/stratis/foo",
            "fs_type": null
        },
        {
            "action": "destroy format",
            "device": "/dev/sda",
            "fs_type": "stratis"
        }
    ],
    "changed": true,
    "crypts": [],
    "leaves": [
        "/dev/sda",
        "/dev/sdb",
        "/dev/sdc",
        "/dev/sdd",
        "/dev/sde",
        "/dev/sdf",
        "/dev/sdg",
        "/dev/sdh",
        "/dev/sdi",
        "/dev/xvda1",
        "/dev/xvda2",
        "/dev/zram0"
    ],
    "mounts": [],
    "packages": [
        "e2fsprogs"
    ],
    "pools": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": []
        }
    ],
    "volumes": []
}
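
The two actions above remove the pool device and wipe the Stratis format from the member disk. A quick follow-up check, not part of the role itself, could confirm the pool's device node is gone; the path comes from the action list, the task itself is an assumption:

    - name: Confirm the pool device node was removed (sketch)
      ansible.builtin.stat:
        path: /dev/stratis/foo
      register: storage_test_stratis_foo  # assumed register name
      failed_when: storage_test_stratis_foo.stat.exists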

TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:83
Saturday 08 February 2025  18:39:01 -0500 (0:00:03.569)       0:08:15.988 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_udevadm_trigger | d(false)",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:90
Saturday 08 February 2025  18:39:01 -0500 (0:00:00.080)       0:08:16.069 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739057789.5975058,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 8,
        "charset": "us-ascii",
        "checksum": "58c9cf35b6a5bb13136caa97ec2cf1f888ff31f6",
        "ctime": 1739057789.5965059,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 279322,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "text/plain",
        "mode": "0644",
        "mtime": 1739057789.5965059,
        "nlink": 1,
        "path": "/etc/fstab",
        "pw_name": "root",
        "readable": true,
        "rgrp": true,
        "roth": true,
        "rusr": true,
        "size": 1366,
        "uid": 0,
        "version": "3651791363",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:95
Saturday 08 February 2025  18:39:02 -0500 (0:00:00.486)       0:08:16.555 ***** 
ok: [managed-node3] => {
    "backup": "",
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:113
Saturday 08 February 2025  18:39:02 -0500 (0:00:00.461)       0:08:17.016 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Show blivet_output] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:119
Saturday 08 February 2025  18:39:02 -0500 (0:00:00.050)       0:08:17.066 ***** 
ok: [managed-node3] => {
    "blivet_output": {
        "actions": [
            {
                "action": "destroy device",
                "device": "/dev/stratis/foo",
                "fs_type": null
            },
            {
                "action": "destroy format",
                "device": "/dev/sda",
                "fs_type": "stratis"
            }
        ],
        "changed": true,
        "crypts": [],
        "failed": false,
        "leaves": [
            "/dev/sda",
            "/dev/sdb",
            "/dev/sdc",
            "/dev/sdd",
            "/dev/sde",
            "/dev/sdf",
            "/dev/sdg",
            "/dev/sdh",
            "/dev/sdi",
            "/dev/xvda1",
            "/dev/xvda2",
            "/dev/zram0"
        ],
        "mounts": [],
        "packages": [
            "e2fsprogs"
        ],
        "pools": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": []
            }
        ],
        "volumes": []
    }
}

TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:128
Saturday 08 February 2025  18:39:02 -0500 (0:00:00.062)       0:08:17.128 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pools_list": [
            {
                "disks": [
                    "sda"
                ],
                "encryption": true,
                "encryption_cipher": null,
                "encryption_clevis_pin": null,
                "encryption_key": null,
                "encryption_key_size": null,
                "encryption_luks_version": null,
                "encryption_password": null,
                "encryption_tang_thumbprint": null,
                "encryption_tang_url": null,
                "grow_to_fill": false,
                "name": "foo",
                "raid_chunk_size": null,
                "raid_device_count": null,
                "raid_level": null,
                "raid_metadata_version": null,
                "raid_spare_count": null,
                "shared": false,
                "state": "absent",
                "type": "stratis",
                "volumes": []
            }
        ]
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:132
Saturday 08 February 2025  18:39:03 -0500 (0:00:00.064)       0:08:17.193 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_volumes_list": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] **************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148
Saturday 08 February 2025  18:39:03 -0500 (0:00:00.059)       0:08:17.253 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:159
Saturday 08 February 2025  18:39:03 -0500 (0:00:00.062)       0:08:17.316 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Set up new/current mounts] ***********
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:164
Saturday 08 February 2025  18:39:03 -0500 (0:00:00.064)       0:08:17.380 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:175
Saturday 08 February 2025  18:39:03 -0500 (0:00:00.125)       0:08:17.506 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:187
Saturday 08 February 2025  18:39:03 -0500 (0:00:00.089)       0:08:17.595 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "blivet_output['mounts']",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195
Saturday 08 February 2025  18:39:03 -0500 (0:00:00.083)       0:08:17.679 ***** 
ok: [managed-node3] => {
    "changed": false,
    "stat": {
        "atime": 1739056062.7691786,
        "attr_flags": "e",
        "attributes": [
            "extents"
        ],
        "block_size": 4096,
        "blocks": 0,
        "charset": "binary",
        "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "ctime": 1739056060.1941664,
        "dev": 51714,
        "device_type": 0,
        "executable": false,
        "exists": true,
        "gid": 0,
        "gr_name": "root",
        "inode": 393219,
        "isblk": false,
        "ischr": false,
        "isdir": false,
        "isfifo": false,
        "isgid": false,
        "islnk": false,
        "isreg": true,
        "issock": false,
        "isuid": false,
        "mimetype": "inode/x-empty",
        "mode": "0600",
        "mtime": 1739056060.195025,
        "nlink": 1,
        "path": "/etc/crypttab",
        "pw_name": "root",
        "readable": true,
        "rgrp": false,
        "roth": false,
        "rusr": true,
        "size": 0,
        "uid": 0,
        "version": "3049710822",
        "wgrp": false,
        "woth": false,
        "writeable": true,
        "wusr": true,
        "xgrp": false,
        "xoth": false,
        "xusr": false
    }
}

TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] ***
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:200
Saturday 08 February 2025  18:39:03 -0500 (0:00:00.470)       0:08:18.150 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [fedora.linux_system_roles.storage : Update facts] ************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222
Saturday 08 February 2025  18:39:04 -0500 (0:00:00.048)       0:08:18.198 ***** 
ok: [managed-node3]

TASK [Verify role results] *****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:266
Saturday 08 February 2025  18:39:05 -0500 (0:00:01.011)       0:08:19.210 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node3

TASK [Print out pool information] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2
Saturday 08 February 2025  18:39:05 -0500 (0:00:00.226)       0:08:19.436 ***** 
ok: [managed-node3] => {
    "_storage_pools_list": [
        {
            "disks": [
                "sda"
            ],
            "encryption": true,
            "encryption_cipher": null,
            "encryption_clevis_pin": null,
            "encryption_key": null,
            "encryption_key_size": null,
            "encryption_luks_version": null,
            "encryption_password": null,
            "encryption_tang_thumbprint": null,
            "encryption_tang_url": null,
            "grow_to_fill": false,
            "name": "foo",
            "raid_chunk_size": null,
            "raid_device_count": null,
            "raid_level": null,
            "raid_metadata_version": null,
            "raid_spare_count": null,
            "shared": false,
            "state": "absent",
            "type": "stratis",
            "volumes": []
        }
    ]
}

TASK [Print out volume information] ********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7
Saturday 08 February 2025  18:39:05 -0500 (0:00:00.073)       0:08:19.510 ***** 
skipping: [managed-node3] => {
    "false_condition": "_storage_volumes_list | length > 0"
}

TASK [Collect info about the volumes.] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15
Saturday 08 February 2025  18:39:05 -0500 (0:00:00.084)       0:08:19.594 ***** 
ok: [managed-node3] => {
    "changed": false,
    "info": {
        "/dev/sda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sda",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdb": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdb",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdc": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdc",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdd": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdd",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sde": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sde",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdf": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdf",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdg": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdg",
            "size": "1T",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdh": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdh",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/sdi": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/sdi",
            "size": "10G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda",
            "size": "250G",
            "type": "disk",
            "uuid": ""
        },
        "/dev/xvda1": {
            "fstype": "",
            "label": "",
            "mountpoint": "",
            "name": "/dev/xvda1",
            "size": "1M",
            "type": "partition",
            "uuid": ""
        },
        "/dev/xvda2": {
            "fstype": "ext4",
            "label": "",
            "mountpoint": "/",
            "name": "/dev/xvda2",
            "size": "250G",
            "type": "partition",
            "uuid": "802f11fb-484f-40e8-bf89-92c463a340ef"
        },
        "/dev/zram0": {
            "fstype": "",
            "label": "",
            "mountpoint": "[SWAP]",
            "name": "/dev/zram0",
            "size": "3.6G",
            "type": "disk",
            "uuid": ""
        }
    }
}
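
The same per-device view can be reproduced by hand. A minimal sketch, assuming lsblk from util-linux is available on the managed node (the column list and --json flag are standard lsblk options; the task name and register variable are illustrative):

    - name: Gather block device info manually (illustrative)
      ansible.builtin.command:
        cmd: lsblk -p --json -o NAME,FSTYPE,LABEL,MOUNTPOINT,SIZE,TYPE,UUID
      register: lsblk_info      # parse with lsblk_info.stdout | from_json
      changed_when: false       # read-only query; never report a change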

TASK [Read the /etc/fstab file for volume existence] ***************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20
Saturday 08 February 2025  18:39:05 -0500 (0:00:00.458)       0:08:20.053 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/fstab"
    ],
    "delta": "0:00:01.004817",
    "end": "2025-02-08 18:39:07.244868",
    "rc": 0,
    "start": "2025-02-08 18:39:06.240051"
}

STDOUT:


# system_role:storage
#
# /etc/fstab
# Created by anaconda on Tue Feb  4 14:37:01 2025
#
# Accessible filesystems, by reference, are maintained under '/dev/disk/'.
# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.
#
# After editing this file, run 'systemctl daemon-reload' to update systemd
# units generated from this file.
#
UUID=802f11fb-484f-40e8-bf89-92c463a340ef /                       ext4    defaults        1 1
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr 0 0
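
For the verification that follows, what matters is that the role left no stray entries for the removed pool behind. A minimal sketch of such a check, assuming the result is registered as storage_test_fstab (the name used by the cleanup task later in this run) and that Stratis filesystems would appear under /dev/stratis/<pool>:

    - name: Assert no leftover pool entries in fstab (illustrative)
      ansible.builtin.assert:
        that:
          # the removed pool was named 'foo'
          - "'/dev/stratis/foo' not in storage_test_fstab.stdout"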

TASK [Read the /etc/crypttab file] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25
Saturday 08 February 2025  18:39:07 -0500 (0:00:01.451)       0:08:21.504 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "cat",
        "/etc/crypttab"
    ],
    "delta": "0:00:00.003253",
    "end": "2025-02-08 18:39:07.695488",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-02-08 18:39:07.692235"
}
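
No STDOUT block follows, i.e. /etc/crypttab is empty, which matches the expected entry count of "0" set further down. A minimal sketch of an equivalent explicit check, assuming the result is registered as storage_test_crypttab (the name used by the cleanup task later in this run):

    - name: Assert the crypttab carries no entries (illustrative)
      ansible.builtin.assert:
        that:
          - storage_test_crypttab.stdout | length == 0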

TASK [Verify the volumes listed in storage_pools were correctly managed] *******
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34
Saturday 08 February 2025  18:39:07 -0500 (0:00:00.455)       0:08:21.960 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node3 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': []})

TASK [Set _storage_pool_tests] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5
Saturday 08 February 2025  18:39:07 -0500 (0:00:00.110)       0:08:22.071 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_pool_tests": [
            "members",
            "volumes"
        ]
    },
    "changed": false
}

TASK [Get VG shared value status] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18
Saturday 08 February 2025  18:39:07 -0500 (0:00:00.060)       0:08:22.131 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that VG shared value checks out] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.055)       0:08:22.186 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify pool subset] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.057)       0:08:22.244 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node3 => (item=members)
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node3 => (item=volumes)

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.112)       0:08:22.357 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] ********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.064)       0:08:22.422 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Set pvs lvm length] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.073)       0:08:22.495 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set pool pvs] ************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.096)       0:08:22.591 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify PV count] *********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.102)       0:08:22.694 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.072)       0:08:22.766 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm'",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.059)       0:08:22.826 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption",
    "skip_reason": "Conditional result was False"
}

TASK [Set expected pv type] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.058)       0:08:22.884 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.raid_level",
    "skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ***********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:51
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.055)       0:08:22.939 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check that blivet supports PV grow to fill] ******************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:64
Saturday 08 February 2025  18:39:08 -0500 (0:00:00.043)       0:08:22.983 ***** 
ok: [managed-node3] => {
    "changed": false,
    "failed_when_result": false,
    "rc": 0
}

STDERR:

OpenSSH_9.9p1, OpenSSL 3.2.2 4 Jun 2024
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match not found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: configuration requests final Match pass
debug2: resolve_canonicalize: hostname 10.31.46.217 is address
debug1: re-parsing configuration
debug1: Reading configuration data /root/.ssh/config
debug1: Reading configuration data /etc/ssh/ssh_config
debug1: Reading configuration data /etc/ssh/ssh_config.d/20-systemd-ssh-proxy.conf
debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf
debug2: checking match for 'final all' host 10.31.46.217 originally 10.31.46.217
debug2: match found
debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config
debug1: auto-mux: Trying existing master at '/root/.ansible/cp/3f058d2ae1'
debug2: fd 3 setting O_NONBLOCK
debug2: mux_client_hello_exchange: master version 4
debug1: mux_client_request_session: master session id: 2
debug2: Received exit status from master 0
Shared connection to 10.31.46.217 closed.


TASK [Verify that PVs fill their whole devices when they should] ***************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:74
Saturday 08 February 2025  18:39:09 -0500 (0:00:00.480)       0:08:23.463 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check MD RAID] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:84
Saturday 08 February 2025  18:39:09 -0500 (0:00:00.059)       0:08:23.522 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node3

TASK [Get information about RAID] **********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8
Saturday 08 February 2025  18:39:09 -0500 (0:00:00.134)       0:08:23.657 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set active devices regex] ************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14
Saturday 08 February 2025  18:39:09 -0500 (0:00:00.115)       0:08:23.772 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set spare devices regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19
Saturday 08 February 2025  18:39:09 -0500 (0:00:00.246)       0:08:24.019 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md version regex] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24
Saturday 08 February 2025  18:39:09 -0500 (0:00:00.078)       0:08:24.097 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Set md chunk size regex] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29
Saturday 08 February 2025  18:39:09 -0500 (0:00:00.069)       0:08:24.166 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Parse the chunk size] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.065)       0:08:24.232 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID active devices count] *****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.055)       0:08:24.288 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID spare devices count] ******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.059)       0:08:24.347 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID metadata version] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.055)       0:08:24.403 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Check RAID chunk size] ***************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.077)       0:08:24.480 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.raid_level != none",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variables used by tests] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.075)       0:08:24.556 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_md_active_devices_re": null,
        "storage_test_md_chunk_size_re": null,
        "storage_test_md_metadata_version_re": null,
        "storage_test_md_spare_devices_re": null
    },
    "changed": false
}

TASK [Check LVM RAID] **********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:87
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.072)       0:08:24.629 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node3

TASK [Validate pool member LVM RAID settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.111)       0:08:24.740 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Thin Pools] ********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:90
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.047)       0:08:24.788 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node3

TASK [Validate pool member thinpool settings] **********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.122)       0:08:24.910 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check member encryption] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:93
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.045)       0:08:24.955 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node3

TASK [Set test variables] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.106)       0:08:25.062 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_expected_crypttab_entries": "0",
        "_storage_test_expected_crypttab_key_file": "-"
    },
    "changed": false
}

TASK [Validate pool member LUKS settings] **************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10
Saturday 08 February 2025  18:39:10 -0500 (0:00:00.071)       0:08:25.134 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Validate pool member crypttab entries] ***********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17
Saturday 08 February 2025  18:39:11 -0500 (0:00:00.048)       0:08:25.182 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clear test variables] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24
Saturday 08 February 2025  18:39:11 -0500 (0:00:00.043)       0:08:25.226 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_storage_test_crypttab_entries": null,
        "_storage_test_crypttab_key_file": null
    },
    "changed": false
}

TASK [Check VDO] ***************************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:96
Saturday 08 February 2025  18:39:11 -0500 (0:00:00.061)       0:08:25.287 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node3

TASK [Validate pool member VDO settings] ***************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2
Saturday 08 February 2025  18:39:11 -0500 (0:00:00.118)       0:08:25.405 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Check Stratis] ***********************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:99
Saturday 08 February 2025  18:39:11 -0500 (0:00:00.077)       0:08:25.482 ***** 
included: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node3

TASK [Run 'stratis report'] ****************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6
Saturday 08 February 2025  18:39:11 -0500 (0:00:00.292)       0:08:25.775 ***** 
ok: [managed-node3] => {
    "changed": false,
    "cmd": [
        "stratis",
        "report"
    ],
    "delta": "0:00:00.377364",
    "end": "2025-02-08 18:39:12.341693",
    "rc": 0,
    "start": "2025-02-08 18:39:11.964329"
}

STDOUT:

{
    "name_to_pool_uuid_map": {},
    "partially_constructed_pools": [],
    "path_to_ids_map": {},
    "pools": [],
    "stopped_pools": []
}

TASK [Get information about Stratis] *******************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:11
Saturday 08 February 2025  18:39:12 -0500 (0:00:00.827)       0:08:26.602 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "_stratis_pool_info": {
            "name_to_pool_uuid_map": {},
            "partially_constructed_pools": [],
            "path_to_ids_map": {},
            "pools": [],
            "stopped_pools": []
        }
    },
    "changed": false
}
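
With the pool removed, every map in the report is empty, so the state == 'present' checks below are skipped. A minimal sketch of an explicit assertion over the _stratis_pool_info fact set above (the task itself is illustrative):

    - name: Assert that no Stratis pools remain (illustrative)
      ansible.builtin.assert:
        that:
          - _stratis_pool_info.pools | length == 0
          - _stratis_pool_info.stopped_pools | length == 0
          - _stratis_pool_info.name_to_pool_uuid_map | length == 0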

TASK [Verify that the pool was created] ****************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15
Saturday 08 February 2025  18:39:12 -0500 (0:00:00.071)       0:08:26.674 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that encryption is correctly set] *********************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:25
Saturday 08 February 2025  18:39:12 -0500 (0:00:00.058)       0:08:26.733 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Verify that Clevis/Tang encryption is correctly set] *********************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:34
Saturday 08 February 2025  18:39:12 -0500 (0:00:00.056)       0:08:26.789 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "storage_test_pool.state == 'present'",
    "skip_reason": "Conditional result was False"
}

TASK [Reset variable used by test] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:44
Saturday 08 February 2025  18:39:12 -0500 (0:00:00.058)       0:08:26.848 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_stratis_report": null
    },
    "changed": false
}

TASK [Clean up test variables] *************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:102
Saturday 08 February 2025  18:39:12 -0500 (0:00:00.094)       0:08:26.942 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "__pvs_lvm_len": null,
        "_storage_test_expected_pv_count": null,
        "_storage_test_expected_pv_type": null,
        "_storage_test_pool_pvs": [],
        "_storage_test_pool_pvs_lvm": []
    },
    "changed": false
}

TASK [Verify the volumes] ******************************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3
Saturday 08 February 2025  18:39:12 -0500 (0:00:00.079)       0:08:27.022 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Verify the volumes with no pool were correctly managed] ******************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:44
Saturday 08 February 2025  18:39:12 -0500 (0:00:00.063)       0:08:27.085 ***** 
skipping: [managed-node3] => {
    "changed": false,
    "skipped_reason": "No items in the list"
}

TASK [Clean up variable namespace] *********************************************
task path: /tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:54
Saturday 08 February 2025  18:39:12 -0500 (0:00:00.078)       0:08:27.163 ***** 
ok: [managed-node3] => {
    "ansible_facts": {
        "storage_test_blkinfo": null,
        "storage_test_crypttab": null,
        "storage_test_fstab": null
    },
    "changed": false
}

PLAY RECAP *********************************************************************
managed-node3              : ok=913  changed=25   unreachable=0    failed=0    skipped=1271 rescued=0    ignored=0   


TASKS RECAP ********************************************************************
Saturday 08 February 2025  18:39:13 -0500 (0:00:00.036)       0:08:27.200 ***** 
=============================================================================== 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 90.83s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 17.52s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 13.70s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 12.56s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 11.38s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 10.22s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.23s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 4.55s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 3.77s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.69s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 3.57s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 3.24s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.storage : Get service facts ------------------- 3.09s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:51 
fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 3.06s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:69 
fedora.linux_system_roles.selinux : Set an SELinux label on a port ------ 2.84s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87 
fedora.linux_system_roles.nbde_server : Ensure tang is installed -------- 2.61s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 1.81s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 
fedora.linux_system_roles.storage : Update facts ------------------------ 1.57s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:222 
fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 1.57s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 
Ensure cryptsetup is present -------------------------------------------- 1.57s
/tmp/collections-xpt/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10